// $ANTLR 3.5.2 org/apache/hadoop/hive/ql/parse/HiveParser.g 2018-05-18 11:39:38

package org.apache.hadoop.hive.ql.parse;

import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;


import org.antlr.runtime.*;
import java.util.Stack;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;

import org.antlr.runtime.tree.*;


/**
   Licensed to the Apache Software Foundation (ASF) under one or more 
   contributor license agreements.  See the NOTICE file distributed with 
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with 
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
@SuppressWarnings("all")
public class HiveParser extends Parser {
	public static final String[] tokenNames = new String[] {
		"", "", "", "", "AMPERSAND", "BITWISEOR", "BITWISEXOR", 
		"ByteLengthFullLiteral", "ByteLengthLiteral", "COLON", "COMMA", "CONCATENATE", 
		"CharSetLiteral", "CharSetName", "DIV", "DIVIDE", "DOLLAR", "DOT", "Digit", 
		"EQUAL", "EQUAL_NS", "Exponent", "GREATERTHAN", "GREATERTHANOREQUALTO", 
		"HexDigit", "Identifier", "IntegralLiteral", "KW_ABORT", "KW_ACTIVATE", 
		"KW_ACTIVE", "KW_ADD", "KW_ADMIN", "KW_AFTER", "KW_ALL", "KW_ALLOC_FRACTION", 
		"KW_ALTER", "KW_ANALYZE", "KW_AND", "KW_ANY", "KW_APPLICATION", "KW_ARCHIVE", 
		"KW_ARRAY", "KW_AS", "KW_ASC", "KW_AUTHORIZATION", "KW_AUTOCOMMIT", "KW_BEFORE", 
		"KW_BETWEEN", "KW_BIGINT", "KW_BINARY", "KW_BOOLEAN", "KW_BOTH", "KW_BUCKET", 
		"KW_BUCKETS", "KW_BY", "KW_CACHE", "KW_CASCADE", "KW_CASE", "KW_CAST", 
		"KW_CHANGE", "KW_CHAR", "KW_CHECK", "KW_CLUSTER", "KW_CLUSTERED", "KW_CLUSTERSTATUS", 
		"KW_COLLECTION", "KW_COLUMN", "KW_COLUMNS", "KW_COMMENT", "KW_COMMIT", 
		"KW_COMPACT", "KW_COMPACTIONS", "KW_COMPUTE", "KW_CONCATENATE", "KW_CONF", 
		"KW_CONSTRAINT", "KW_CONTINUE", "KW_CREATE", "KW_CROSS", "KW_CUBE", "KW_CURRENT", 
		"KW_CURRENT_DATE", "KW_CURRENT_TIMESTAMP", "KW_CURSOR", "KW_DATA", "KW_DATABASE", 
		"KW_DATABASES", "KW_DATE", "KW_DATETIME", "KW_DAY", "KW_DBPROPERTIES", 
		"KW_DECIMAL", "KW_DEFAULT", "KW_DEFERRED", "KW_DEFINED", "KW_DELETE", 
		"KW_DELIMITED", "KW_DEPENDENCY", "KW_DESC", "KW_DESCRIBE", "KW_DETAIL", 
		"KW_DIRECTORIES", "KW_DIRECTORY", "KW_DISABLE", "KW_DISTINCT", "KW_DISTRIBUTE", 
		"KW_DO", "KW_DOUBLE", "KW_DOW", "KW_DROP", "KW_DUMP", "KW_ELEM_TYPE", 
		"KW_ELSE", "KW_ENABLE", "KW_END", "KW_ENFORCED", "KW_ESCAPED", "KW_EXCEPT", 
		"KW_EXCHANGE", "KW_EXCLUSIVE", "KW_EXISTS", "KW_EXPLAIN", "KW_EXPORT", 
		"KW_EXPRESSION", "KW_EXTENDED", "KW_EXTERNAL", "KW_EXTRACT", "KW_FALSE", 
		"KW_FETCH", "KW_FIELDS", "KW_FILE", "KW_FILEFORMAT", "KW_FIRST", "KW_FLOAT", 
		"KW_FLOOR", "KW_FOLLOWING", "KW_FOR", "KW_FOREIGN", "KW_FORMAT", "KW_FORMATTED", 
		"KW_FROM", "KW_FULL", "KW_FUNCTION", "KW_FUNCTIONS", "KW_GRANT", "KW_GROUP", 
		"KW_GROUPING", "KW_HAVING", "KW_HOUR", "KW_IDXPROPERTIES", "KW_IF", "KW_IMPORT", 
		"KW_IN", "KW_INDEX", "KW_INDEXES", "KW_INNER", "KW_INPATH", "KW_INPUTDRIVER", 
		"KW_INPUTFORMAT", "KW_INSERT", "KW_INT", "KW_INTERSECT", "KW_INTERVAL", 
		"KW_INTO", "KW_IS", "KW_ISOLATION", "KW_ITEMS", "KW_JAR", "KW_JOIN", "KW_KEY", 
		"KW_KEYS", "KW_KEY_TYPE", "KW_KILL", "KW_LAST", "KW_LATERAL", "KW_LEFT", 
		"KW_LESS", "KW_LEVEL", "KW_LIKE", "KW_LIMIT", "KW_LINES", "KW_LOAD", "KW_LOCAL", 
		"KW_LOCATION", "KW_LOCK", "KW_LOCKS", "KW_LOGICAL", "KW_LONG", "KW_MACRO", 
		"KW_MANAGEMENT", "KW_MAP", "KW_MAPJOIN", "KW_MAPPING", "KW_MATCHED", "KW_MATERIALIZED", 
		"KW_MERGE", "KW_METADATA", "KW_MINUS", "KW_MINUTE", "KW_MONTH", "KW_MORE", 
		"KW_MOVE", "KW_MSCK", "KW_NONE", "KW_NORELY", "KW_NOSCAN", "KW_NOT", "KW_NOVALIDATE", 
		"KW_NULL", "KW_NULLS", "KW_OF", "KW_OFFSET", "KW_ON", "KW_ONLY", "KW_OPERATOR", 
		"KW_OPTION", "KW_OR", "KW_ORDER", "KW_OUT", "KW_OUTER", "KW_OUTPUTDRIVER", 
		"KW_OUTPUTFORMAT", "KW_OVER", "KW_OVERWRITE", "KW_OWNER", "KW_PARTITION", 
		"KW_PARTITIONED", "KW_PARTITIONS", "KW_PATH", "KW_PERCENT", "KW_PLAN", 
		"KW_PLANS", "KW_PLUS", "KW_POOL", "KW_PRECEDING", "KW_PRECISION", "KW_PRESERVE", 
		"KW_PRIMARY", "KW_PRINCIPALS", "KW_PROCEDURE", "KW_PURGE", "KW_QUARTER", 
		"KW_QUERY", "KW_QUERY_PARALLELISM", "KW_RANGE", "KW_READ", "KW_READS", 
		"KW_REBUILD", "KW_RECORDREADER", "KW_RECORDWRITER", "KW_REDUCE", "KW_REFERENCES", 
		"KW_REGEXP", "KW_RELOAD", "KW_RELY", "KW_RENAME", "KW_REOPTIMIZATION", 
		"KW_REPAIR", "KW_REPL", "KW_REPLACE", "KW_REPLICATION", "KW_RESOURCE", 
		"KW_RESTRICT", "KW_REVOKE", "KW_REWRITE", "KW_RIGHT", "KW_RLIKE", "KW_ROLE", 
		"KW_ROLES", "KW_ROLLBACK", "KW_ROLLUP", "KW_ROW", "KW_ROWS", "KW_SCHEDULING_POLICY", 
		"KW_SCHEMA", "KW_SCHEMAS", "KW_SECOND", "KW_SELECT", "KW_SEMI", "KW_SERDE", 
		"KW_SERDEPROPERTIES", "KW_SERVER", "KW_SET", "KW_SETS", "KW_SHARED", "KW_SHOW", 
		"KW_SHOW_DATABASE", "KW_SKEWED", "KW_SMALLINT", "KW_SNAPSHOT", "KW_SORT", 
		"KW_SORTED", "KW_SSL", "KW_START", "KW_STATISTICS", "KW_STATUS", "KW_STORED", 
		"KW_STREAMTABLE", "KW_STRING", "KW_STRUCT", "KW_SUMMARY", "KW_SYNC", "KW_TABLE", 
		"KW_TABLES", "KW_TABLESAMPLE", "KW_TBLPROPERTIES", "KW_TEMPORARY", "KW_TERMINATED", 
		"KW_THEN", "KW_TIME", "KW_TIMESTAMP", "KW_TIMESTAMPLOCALTZ", "KW_TINYINT", 
		"KW_TO", "KW_TOUCH", "KW_TRANSACTION", "KW_TRANSACTIONS", "KW_TRANSFORM", 
		"KW_TRIGGER", "KW_TRUE", "KW_TRUNCATE", "KW_UNARCHIVE", "KW_UNBOUNDED", 
		"KW_UNDO", "KW_UNION", "KW_UNIONTYPE", "KW_UNIQUE", "KW_UNIQUEJOIN", "KW_UNLOCK", 
		"KW_UNMANAGED", "KW_UNSET", "KW_UNSIGNED", "KW_UPDATE", "KW_URI", "KW_USE", 
		"KW_USER", "KW_USING", "KW_UTC", "KW_UTCTIMESTAMP", "KW_VALIDATE", "KW_VALUES", 
		"KW_VALUE_TYPE", "KW_VARCHAR", "KW_VECTORIZATION", "KW_VIEW", "KW_VIEWS", 
		"KW_WAIT", "KW_WEEK", "KW_WHEN", "KW_WHERE", "KW_WHILE", "KW_WINDOW", 
		"KW_WITH", "KW_WORK", "KW_WORKLOAD", "KW_WRITE", "KW_YEAR", "KW_ZONE", 
		"LCURLY", "LESSTHAN", "LESSTHANOREQUALTO", "LINE_COMMENT", "LPAREN", "LSQUARE", 
		"Letter", "MINUS", "MOD", "NOTEQUAL", "Number", "NumberLiteral", "PLUS", 
		"QUERY_HINT", "QUESTION", "QuotedIdentifier", "RCURLY", "RPAREN", "RSQUARE", 
		"RegexComponent", "SEMICOLON", "STAR", "StringLiteral", "TILDE", "TimeFullLiteral", 
		"WS", "KW_BATCH", "KW_DAYOFWEEK", "KW_HOLD_DDLTIME", "KW_IGNORE", "KW_NO_DROP", 
		"KW_OFFLINE", "KW_PROTECTION", "KW_READONLY", "KW_TIMESTAMPTZ", "TOK_ABORT_TRANSACTIONS", 
		"TOK_ACTIVATE", "TOK_ADD_TRIGGER", "TOK_ADMIN_OPTION_FOR", "TOK_ALIASLIST", 
		"TOK_ALLCOLREF", "TOK_ALLOC_FRACTION", "TOK_ALTERDATABASE_LOCATION", "TOK_ALTERDATABASE_OWNER", 
		"TOK_ALTERDATABASE_PROPERTIES", "TOK_ALTERTABLE", "TOK_ALTERTABLE_ADDCOLS", 
		"TOK_ALTERTABLE_ADDCONSTRAINT", "TOK_ALTERTABLE_ADDPARTS", "TOK_ALTERTABLE_ARCHIVE", 
		"TOK_ALTERTABLE_BUCKETS", "TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION", "TOK_ALTERTABLE_CLUSTER_SORT", 
		"TOK_ALTERTABLE_COMPACT", "TOK_ALTERTABLE_DROPCONSTRAINT", "TOK_ALTERTABLE_DROPPARTS", 
		"TOK_ALTERTABLE_DROPPROPERTIES", "TOK_ALTERTABLE_EXCHANGEPARTITION", "TOK_ALTERTABLE_FILEFORMAT", 
		"TOK_ALTERTABLE_LOCATION", "TOK_ALTERTABLE_MERGEFILES", "TOK_ALTERTABLE_OWNER", 
		"TOK_ALTERTABLE_PARTCOLTYPE", "TOK_ALTERTABLE_PROPERTIES", "TOK_ALTERTABLE_RENAME", 
		"TOK_ALTERTABLE_RENAMECOL", "TOK_ALTERTABLE_RENAMEPART", "TOK_ALTERTABLE_REPLACECOLS", 
		"TOK_ALTERTABLE_SERDEPROPERTIES", "TOK_ALTERTABLE_SERIALIZER", "TOK_ALTERTABLE_SKEWED", 
		"TOK_ALTERTABLE_SKEWED_LOCATION", "TOK_ALTERTABLE_TOUCH", "TOK_ALTERTABLE_UNARCHIVE", 
		"TOK_ALTERTABLE_UPDATECOLSTATS", "TOK_ALTERTABLE_UPDATECOLUMNS", "TOK_ALTERTABLE_UPDATESTATS", 
		"TOK_ALTERVIEW", "TOK_ALTERVIEW_ADDPARTS", "TOK_ALTERVIEW_DROPPARTS", 
		"TOK_ALTERVIEW_DROPPROPERTIES", "TOK_ALTERVIEW_PROPERTIES", "TOK_ALTERVIEW_RENAME", 
		"TOK_ALTER_MAPPING", "TOK_ALTER_MATERIALIZED_VIEW", "TOK_ALTER_MATERIALIZED_VIEW_REBUILD", 
		"TOK_ALTER_MATERIALIZED_VIEW_REWRITE", "TOK_ALTER_POOL", "TOK_ALTER_RP", 
		"TOK_ALTER_TRIGGER", "TOK_ANALYZE", "TOK_ARCHIVE", "TOK_BIGINT", "TOK_BINARY", 
		"TOK_BLOCKING", "TOK_BOOLEAN", "TOK_CACHE_METADATA", "TOK_CASCADE", "TOK_CHAR", 
		"TOK_CHARSETLITERAL", "TOK_CHECK_CONSTRAINT", "TOK_CLUSTERBY", "TOK_COLTYPELIST", 
		"TOK_COL_NAME", "TOK_COMMIT", "TOK_CONSTRAINT_NAME", "TOK_CREATEDATABASE", 
		"TOK_CREATEFUNCTION", "TOK_CREATEMACRO", "TOK_CREATEROLE", "TOK_CREATETABLE", 
		"TOK_CREATEVIEW", "TOK_CREATE_MAPPING", "TOK_CREATE_MATERIALIZED_VIEW", 
		"TOK_CREATE_POOL", "TOK_CREATE_RP", "TOK_CREATE_TRIGGER", "TOK_CROSSJOIN", 
		"TOK_CTE", "TOK_CUBE_GROUPBY", "TOK_DATABASECOMMENT", "TOK_DATABASELOCATION", 
		"TOK_DATABASEPROPERTIES", "TOK_DATE", "TOK_DATELITERAL", "TOK_DATETIME", 
		"TOK_DBNAME", "TOK_DBPROPLIST", "TOK_DB_TYPE", "TOK_DECIMAL", "TOK_DEFAULT_POOL", 
		"TOK_DEFAULT_VALUE", "TOK_DELETE", "TOK_DELETE_FROM", "TOK_DESCDATABASE", 
		"TOK_DESCFUNCTION", "TOK_DESCTABLE", "TOK_DESTINATION", "TOK_DETAIL", 
		"TOK_DIR", "TOK_DISABLE", "TOK_DISTRIBUTEBY", "TOK_DOUBLE", "TOK_DROPDATABASE", 
		"TOK_DROPFUNCTION", "TOK_DROPMACRO", "TOK_DROPROLE", "TOK_DROPTABLE", 
		"TOK_DROPVIEW", "TOK_DROP_MAPPING", "TOK_DROP_MATERIALIZED_VIEW", "TOK_DROP_POOL", 
		"TOK_DROP_RP", "TOK_DROP_TRIGGER", "TOK_ENABLE", "TOK_EXCEPTALL", "TOK_EXCEPTDISTINCT", 
		"TOK_EXPLAIN", "TOK_EXPLAIN_SQ_REWRITE", "TOK_EXPLIST", "TOK_EXPORT", 
		"TOK_EXPRESSION", "TOK_FALSE", "TOK_FILE", "TOK_FILEFORMAT_GENERIC", "TOK_FLOAT", 
		"TOK_FOREIGN_KEY", "TOK_FROM", "TOK_FULLOUTERJOIN", "TOK_FUNCTION", "TOK_FUNCTIONDI", 
		"TOK_FUNCTIONSTAR", "TOK_GRANT", "TOK_GRANT_OPTION_FOR", "TOK_GRANT_ROLE", 
		"TOK_GRANT_WITH_ADMIN_OPTION", "TOK_GRANT_WITH_OPTION", "TOK_GROUP", "TOK_GROUPBY", 
		"TOK_GROUPING_SETS", "TOK_GROUPING_SETS_EXPRESSION", "TOK_HAVING", "TOK_IFEXISTS", 
		"TOK_IFNOTEXISTS", "TOK_IMPORT", "TOK_INPUTFORMAT", "TOK_INSERT", "TOK_INSERT_INTO", 
		"TOK_INT", "TOK_INTERSECTALL", "TOK_INTERSECTDISTINCT", "TOK_INTERVAL_DAY_LITERAL", 
		"TOK_INTERVAL_DAY_TIME", "TOK_INTERVAL_DAY_TIME_LITERAL", "TOK_INTERVAL_HOUR_LITERAL", 
		"TOK_INTERVAL_MINUTE_LITERAL", "TOK_INTERVAL_MONTH_LITERAL", "TOK_INTERVAL_SECOND_LITERAL", 
		"TOK_INTERVAL_YEAR_LITERAL", "TOK_INTERVAL_YEAR_MONTH", "TOK_INTERVAL_YEAR_MONTH_LITERAL", 
		"TOK_ISOLATION_LEVEL", "TOK_ISOLATION_SNAPSHOT", "TOK_JAR", "TOK_JOIN", 
		"TOK_KILL_QUERY", "TOK_LATERAL_VIEW", "TOK_LATERAL_VIEW_OUTER", "TOK_LEFTOUTERJOIN", 
		"TOK_LEFTSEMIJOIN", "TOK_LENGTH", "TOK_LIKERP", "TOK_LIKETABLE", "TOK_LIMIT", 
		"TOK_LIST", "TOK_LOAD", "TOK_LOCKDB", "TOK_LOCKTABLE", "TOK_MAP", "TOK_MATCHED", 
		"TOK_MERGE", "TOK_METADATA", "TOK_MSCK", "TOK_NORELY", "TOK_NOT_CLUSTERED", 
		"TOK_NOT_MATCHED", "TOK_NOT_NULL", "TOK_NOT_SORTED", "TOK_NOVALIDATE", 
		"TOK_NO_DROP", "TOK_NULL", "TOK_NULLS_FIRST", "TOK_NULLS_LAST", "TOK_OFFLINE", 
		"TOK_OFFSET", "TOK_ONLY", "TOK_OPERATOR", "TOK_OP_ADD", "TOK_OP_AND", 
		"TOK_OP_BITAND", "TOK_OP_BITNOT", "TOK_OP_BITOR", "TOK_OP_BITXOR", "TOK_OP_DIV", 
		"TOK_OP_EQ", "TOK_OP_GE", "TOK_OP_GT", "TOK_OP_LE", "TOK_OP_LIKE", "TOK_OP_LT", 
		"TOK_OP_MOD", "TOK_OP_MUL", "TOK_OP_NE", "TOK_OP_NOT", "TOK_OP_OR", "TOK_OP_SUB", 
		"TOK_ORDERBY", "TOK_ORREPLACE", "TOK_PARTITIONINGSPEC", "TOK_PARTITIONLOCATION", 
		"TOK_PARTSPEC", "TOK_PARTVAL", "TOK_PATH", "TOK_PERCENT", "TOK_PRIMARY_KEY", 
		"TOK_PRINCIPAL_NAME", "TOK_PRIVILEGE", "TOK_PRIVILEGE_LIST", "TOK_PRIV_ALL", 
		"TOK_PRIV_ALTER_DATA", "TOK_PRIV_ALTER_METADATA", "TOK_PRIV_CREATE", "TOK_PRIV_DELETE", 
		"TOK_PRIV_DROP", "TOK_PRIV_INSERT", "TOK_PRIV_LOCK", "TOK_PRIV_OBJECT", 
		"TOK_PRIV_OBJECT_COL", "TOK_PRIV_SELECT", "TOK_PRIV_SHOW_DATABASE", "TOK_PTBLFUNCTION", 
		"TOK_QUERY", "TOK_QUERY_PARALLELISM", "TOK_READONLY", "TOK_RECORDREADER", 
		"TOK_RECORDWRITER", "TOK_RELOADFUNCTION", "TOK_RELY", "TOK_RENAME", "TOK_REPLACE", 
		"TOK_REPLICATION", "TOK_REPL_CONFIG", "TOK_REPL_CONFIG_LIST", "TOK_REPL_DUMP", 
		"TOK_REPL_LOAD", "TOK_REPL_STATUS", "TOK_RESOURCE_ALL", "TOK_RESOURCE_LIST", 
		"TOK_RESOURCE_URI", "TOK_RESTRICT", "TOK_REVOKE", "TOK_REVOKE_ROLE", "TOK_REWRITE_DISABLED", 
		"TOK_REWRITE_ENABLED", "TOK_RIGHTOUTERJOIN", "TOK_ROLE", "TOK_ROLLBACK", 
		"TOK_ROLLUP_GROUPBY", "TOK_ROWCOUNT", "TOK_SCHEDULING_POLICY", "TOK_SELECT", 
		"TOK_SELECTDI", "TOK_SELEXPR", "TOK_SERDE", "TOK_SERDENAME", "TOK_SERDEPROPS", 
		"TOK_SERVER_TYPE", "TOK_SETCOLREF", "TOK_SET_AUTOCOMMIT", "TOK_SET_COLUMNS_CLAUSE", 
		"TOK_SHOWCOLUMNS", "TOK_SHOWCONF", "TOK_SHOWDATABASES", "TOK_SHOWDBLOCKS", 
		"TOK_SHOWFUNCTIONS", "TOK_SHOWLOCKS", "TOK_SHOWMATERIALIZEDVIEWS", "TOK_SHOWPARTITIONS", 
		"TOK_SHOWTABLES", "TOK_SHOWVIEWS", "TOK_SHOW_COMPACTIONS", "TOK_SHOW_CREATEDATABASE", 
		"TOK_SHOW_CREATETABLE", "TOK_SHOW_GRANT", "TOK_SHOW_ROLES", "TOK_SHOW_ROLE_GRANT", 
		"TOK_SHOW_ROLE_PRINCIPALS", "TOK_SHOW_RP", "TOK_SHOW_SET_ROLE", "TOK_SHOW_TABLESTATUS", 
		"TOK_SHOW_TBLPROPERTIES", "TOK_SHOW_TRANSACTIONS", "TOK_SKEWED_LOCATIONS", 
		"TOK_SKEWED_LOCATION_LIST", "TOK_SKEWED_LOCATION_MAP", "TOK_SMALLINT", 
		"TOK_SORTBY", "TOK_START_TRANSACTION", "TOK_STORAGEHANDLER", "TOK_STOREDASDIRS", 
		"TOK_STRING", "TOK_STRINGLITERALSEQUENCE", "TOK_STRUCT", "TOK_SUBQUERY", 
		"TOK_SUBQUERY_EXPR", "TOK_SUBQUERY_OP", "TOK_SUBQUERY_OP_NOTEXISTS", "TOK_SUBQUERY_OP_NOTIN", 
		"TOK_SUMMARY", "TOK_SWITCHDATABASE", "TOK_TAB", "TOK_TABALIAS", "TOK_TABCOL", 
		"TOK_TABCOLLIST", "TOK_TABCOLNAME", "TOK_TABCOLVALUE", "TOK_TABCOLVALUES", 
		"TOK_TABCOLVALUE_PAIR", "TOK_TABLEBUCKETSAMPLE", "TOK_TABLECOMMENT", "TOK_TABLEFILEFORMAT", 
		"TOK_TABLELOCATION", "TOK_TABLEPARTCOLS", "TOK_TABLEPROPERTIES", "TOK_TABLEPROPERTY", 
		"TOK_TABLEPROPLIST", "TOK_TABLEROWFORMAT", "TOK_TABLEROWFORMATCOLLITEMS", 
		"TOK_TABLEROWFORMATFIELD", "TOK_TABLEROWFORMATLINES", "TOK_TABLEROWFORMATMAPKEYS", 
		"TOK_TABLEROWFORMATNULL", "TOK_TABLESERIALIZER", "TOK_TABLESKEWED", "TOK_TABLESPLITSAMPLE", 
		"TOK_TABLE_OR_COL", "TOK_TABLE_PARTITION", "TOK_TABLE_TYPE", "TOK_TABNAME", 
		"TOK_TABREF", "TOK_TABSORTCOLNAMEASC", "TOK_TABSORTCOLNAMEDESC", "TOK_TABSRC", 
		"TOK_TABTYPE", "TOK_TEMPORARY", "TOK_TIMESTAMP", "TOK_TIMESTAMPLITERAL", 
		"TOK_TIMESTAMPLOCALTZ", "TOK_TIMESTAMPLOCALTZLITERAL", "TOK_TINYINT", 
		"TOK_TMP_FILE", "TOK_TO", "TOK_TRANSFORM", "TOK_TRIGGER_EXPRESSION", "TOK_TRUE", 
		"TOK_TRUNCATETABLE", "TOK_TXN_ACCESS_MODE", "TOK_TXN_READ_ONLY", "TOK_TXN_READ_WRITE", 
		"TOK_UNIONALL", "TOK_UNIONDISTINCT", "TOK_UNIONTYPE", "TOK_UNIQUE", "TOK_UNIQUEJOIN", 
		"TOK_UNLOCKDB", "TOK_UNLOCKTABLE", "TOK_UNMANAGED", "TOK_UPDATE", "TOK_UPDATE_TABLE", 
		"TOK_URI_TYPE", "TOK_USER", "TOK_USERSCRIPTCOLNAMES", "TOK_USERSCRIPTCOLSCHEMA", 
		"TOK_VALIDATE", "TOK_VARCHAR", "TOK_VIEWPARTCOLS", "TOK_WHERE", "TOK_WINDOWDEF", 
		"TOK_WINDOWRANGE", "TOK_WINDOWSPEC", "TOK_WINDOWVALUES", "1107"
	};
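	// tokenNames[i] is the display name the ANTLR 3 runtime reports for token
	// type i. The first four slots are reserved by the runtime (&lt;invalid&gt;,
	// &lt;EOR&gt;, &lt;DOWN&gt;, &lt;UP&gt;), so, for example, tokenNames[AMPERSAND] is
	// "AMPERSAND". The constants below bind each token name to its numeric type.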
	public static final int EOF=-1;
	public static final int AMPERSAND=4;
	public static final int BITWISEOR=5;
	public static final int BITWISEXOR=6;
	public static final int ByteLengthFullLiteral=7;
	public static final int ByteLengthLiteral=8;
	public static final int COLON=9;
	public static final int COMMA=10;
	public static final int CONCATENATE=11;
	public static final int CharSetLiteral=12;
	public static final int CharSetName=13;
	public static final int DIV=14;
	public static final int DIVIDE=15;
	public static final int DOLLAR=16;
	public static final int DOT=17;
	public static final int Digit=18;
	public static final int EQUAL=19;
	public static final int EQUAL_NS=20;
	public static final int Exponent=21;
	public static final int GREATERTHAN=22;
	public static final int GREATERTHANOREQUALTO=23;
	public static final int HexDigit=24;
	public static final int Identifier=25;
	public static final int IntegralLiteral=26;
	public static final int KW_ABORT=27;
	public static final int KW_ACTIVATE=28;
	public static final int KW_ACTIVE=29;
	public static final int KW_ADD=30;
	public static final int KW_ADMIN=31;
	public static final int KW_AFTER=32;
	public static final int KW_ALL=33;
	public static final int KW_ALLOC_FRACTION=34;
	public static final int KW_ALTER=35;
	public static final int KW_ANALYZE=36;
	public static final int KW_AND=37;
	public static final int KW_ANY=38;
	public static final int KW_APPLICATION=39;
	public static final int KW_ARCHIVE=40;
	public static final int KW_ARRAY=41;
	public static final int KW_AS=42;
	public static final int KW_ASC=43;
	public static final int KW_AUTHORIZATION=44;
	public static final int KW_AUTOCOMMIT=45;
	public static final int KW_BEFORE=46;
	public static final int KW_BETWEEN=47;
	public static final int KW_BIGINT=48;
	public static final int KW_BINARY=49;
	public static final int KW_BOOLEAN=50;
	public static final int KW_BOTH=51;
	public static final int KW_BUCKET=52;
	public static final int KW_BUCKETS=53;
	public static final int KW_BY=54;
	public static final int KW_CACHE=55;
	public static final int KW_CASCADE=56;
	public static final int KW_CASE=57;
	public static final int KW_CAST=58;
	public static final int KW_CHANGE=59;
	public static final int KW_CHAR=60;
	public static final int KW_CHECK=61;
	public static final int KW_CLUSTER=62;
	public static final int KW_CLUSTERED=63;
	public static final int KW_CLUSTERSTATUS=64;
	public static final int KW_COLLECTION=65;
	public static final int KW_COLUMN=66;
	public static final int KW_COLUMNS=67;
	public static final int KW_COMMENT=68;
	public static final int KW_COMMIT=69;
	public static final int KW_COMPACT=70;
	public static final int KW_COMPACTIONS=71;
	public static final int KW_COMPUTE=72;
	public static final int KW_CONCATENATE=73;
	public static final int KW_CONF=74;
	public static final int KW_CONSTRAINT=75;
	public static final int KW_CONTINUE=76;
	public static final int KW_CREATE=77;
	public static final int KW_CROSS=78;
	public static final int KW_CUBE=79;
	public static final int KW_CURRENT=80;
	public static final int KW_CURRENT_DATE=81;
	public static final int KW_CURRENT_TIMESTAMP=82;
	public static final int KW_CURSOR=83;
	public static final int KW_DATA=84;
	public static final int KW_DATABASE=85;
	public static final int KW_DATABASES=86;
	public static final int KW_DATE=87;
	public static final int KW_DATETIME=88;
	public static final int KW_DAY=89;
	public static final int KW_DBPROPERTIES=90;
	public static final int KW_DECIMAL=91;
	public static final int KW_DEFAULT=92;
	public static final int KW_DEFERRED=93;
	public static final int KW_DEFINED=94;
	public static final int KW_DELETE=95;
	public static final int KW_DELIMITED=96;
	public static final int KW_DEPENDENCY=97;
	public static final int KW_DESC=98;
	public static final int KW_DESCRIBE=99;
	public static final int KW_DETAIL=100;
	public static final int KW_DIRECTORIES=101;
	public static final int KW_DIRECTORY=102;
	public static final int KW_DISABLE=103;
	public static final int KW_DISTINCT=104;
	public static final int KW_DISTRIBUTE=105;
	public static final int KW_DO=106;
	public static final int KW_DOUBLE=107;
	public static final int KW_DOW=108;
	public static final int KW_DROP=109;
	public static final int KW_DUMP=110;
	public static final int KW_ELEM_TYPE=111;
	public static final int KW_ELSE=112;
	public static final int KW_ENABLE=113;
	public static final int KW_END=114;
	public static final int KW_ENFORCED=115;
	public static final int KW_ESCAPED=116;
	public static final int KW_EXCEPT=117;
	public static final int KW_EXCHANGE=118;
	public static final int KW_EXCLUSIVE=119;
	public static final int KW_EXISTS=120;
	public static final int KW_EXPLAIN=121;
	public static final int KW_EXPORT=122;
	public static final int KW_EXPRESSION=123;
	public static final int KW_EXTENDED=124;
	public static final int KW_EXTERNAL=125;
	public static final int KW_EXTRACT=126;
	public static final int KW_FALSE=127;
	public static final int KW_FETCH=128;
	public static final int KW_FIELDS=129;
	public static final int KW_FILE=130;
	public static final int KW_FILEFORMAT=131;
	public static final int KW_FIRST=132;
	public static final int KW_FLOAT=133;
	public static final int KW_FLOOR=134;
	public static final int KW_FOLLOWING=135;
	public static final int KW_FOR=136;
	public static final int KW_FOREIGN=137;
	public static final int KW_FORMAT=138;
	public static final int KW_FORMATTED=139;
	public static final int KW_FROM=140;
	public static final int KW_FULL=141;
	public static final int KW_FUNCTION=142;
	public static final int KW_FUNCTIONS=143;
	public static final int KW_GRANT=144;
	public static final int KW_GROUP=145;
	public static final int KW_GROUPING=146;
	public static final int KW_HAVING=147;
	public static final int KW_HOUR=148;
	public static final int KW_IDXPROPERTIES=149;
	public static final int KW_IF=150;
	public static final int KW_IMPORT=151;
	public static final int KW_IN=152;
	public static final int KW_INDEX=153;
	public static final int KW_INDEXES=154;
	public static final int KW_INNER=155;
	public static final int KW_INPATH=156;
	public static final int KW_INPUTDRIVER=157;
	public static final int KW_INPUTFORMAT=158;
	public static final int KW_INSERT=159;
	public static final int KW_INT=160;
	public static final int KW_INTERSECT=161;
	public static final int KW_INTERVAL=162;
	public static final int KW_INTO=163;
	public static final int KW_IS=164;
	public static final int KW_ISOLATION=165;
	public static final int KW_ITEMS=166;
	public static final int KW_JAR=167;
	public static final int KW_JOIN=168;
	public static final int KW_KEY=169;
	public static final int KW_KEYS=170;
	public static final int KW_KEY_TYPE=171;
	public static final int KW_KILL=172;
	public static final int KW_LAST=173;
	public static final int KW_LATERAL=174;
	public static final int KW_LEFT=175;
	public static final int KW_LESS=176;
	public static final int KW_LEVEL=177;
	public static final int KW_LIKE=178;
	public static final int KW_LIMIT=179;
	public static final int KW_LINES=180;
	public static final int KW_LOAD=181;
	public static final int KW_LOCAL=182;
	public static final int KW_LOCATION=183;
	public static final int KW_LOCK=184;
	public static final int KW_LOCKS=185;
	public static final int KW_LOGICAL=186;
	public static final int KW_LONG=187;
	public static final int KW_MACRO=188;
	public static final int KW_MANAGEMENT=189;
	public static final int KW_MAP=190;
	public static final int KW_MAPJOIN=191;
	public static final int KW_MAPPING=192;
	public static final int KW_MATCHED=193;
	public static final int KW_MATERIALIZED=194;
	public static final int KW_MERGE=195;
	public static final int KW_METADATA=196;
	public static final int KW_MINUS=197;
	public static final int KW_MINUTE=198;
	public static final int KW_MONTH=199;
	public static final int KW_MORE=200;
	public static final int KW_MOVE=201;
	public static final int KW_MSCK=202;
	public static final int KW_NONE=203;
	public static final int KW_NORELY=204;
	public static final int KW_NOSCAN=205;
	public static final int KW_NOT=206;
	public static final int KW_NOVALIDATE=207;
	public static final int KW_NULL=208;
	public static final int KW_NULLS=209;
	public static final int KW_OF=210;
	public static final int KW_OFFSET=211;
	public static final int KW_ON=212;
	public static final int KW_ONLY=213;
	public static final int KW_OPERATOR=214;
	public static final int KW_OPTION=215;
	public static final int KW_OR=216;
	public static final int KW_ORDER=217;
	public static final int KW_OUT=218;
	public static final int KW_OUTER=219;
	public static final int KW_OUTPUTDRIVER=220;
	public static final int KW_OUTPUTFORMAT=221;
	public static final int KW_OVER=222;
	public static final int KW_OVERWRITE=223;
	public static final int KW_OWNER=224;
	public static final int KW_PARTITION=225;
	public static final int KW_PARTITIONED=226;
	public static final int KW_PARTITIONS=227;
	public static final int KW_PATH=228;
	public static final int KW_PERCENT=229;
	public static final int KW_PLAN=230;
	public static final int KW_PLANS=231;
	public static final int KW_PLUS=232;
	public static final int KW_POOL=233;
	public static final int KW_PRECEDING=234;
	public static final int KW_PRECISION=235;
	public static final int KW_PRESERVE=236;
	public static final int KW_PRIMARY=237;
	public static final int KW_PRINCIPALS=238;
	public static final int KW_PROCEDURE=239;
	public static final int KW_PURGE=240;
	public static final int KW_QUARTER=241;
	public static final int KW_QUERY=242;
	public static final int KW_QUERY_PARALLELISM=243;
	public static final int KW_RANGE=244;
	public static final int KW_READ=245;
	public static final int KW_READS=246;
	public static final int KW_REBUILD=247;
	public static final int KW_RECORDREADER=248;
	public static final int KW_RECORDWRITER=249;
	public static final int KW_REDUCE=250;
	public static final int KW_REFERENCES=251;
	public static final int KW_REGEXP=252;
	public static final int KW_RELOAD=253;
	public static final int KW_RELY=254;
	public static final int KW_RENAME=255;
	public static final int KW_REOPTIMIZATION=256;
	public static final int KW_REPAIR=257;
	public static final int KW_REPL=258;
	public static final int KW_REPLACE=259;
	public static final int KW_REPLICATION=260;
	public static final int KW_RESOURCE=261;
	public static final int KW_RESTRICT=262;
	public static final int KW_REVOKE=263;
	public static final int KW_REWRITE=264;
	public static final int KW_RIGHT=265;
	public static final int KW_RLIKE=266;
	public static final int KW_ROLE=267;
	public static final int KW_ROLES=268;
	public static final int KW_ROLLBACK=269;
	public static final int KW_ROLLUP=270;
	public static final int KW_ROW=271;
	public static final int KW_ROWS=272;
	public static final int KW_SCHEDULING_POLICY=273;
	public static final int KW_SCHEMA=274;
	public static final int KW_SCHEMAS=275;
	public static final int KW_SECOND=276;
	public static final int KW_SELECT=277;
	public static final int KW_SEMI=278;
	public static final int KW_SERDE=279;
	public static final int KW_SERDEPROPERTIES=280;
	public static final int KW_SERVER=281;
	public static final int KW_SET=282;
	public static final int KW_SETS=283;
	public static final int KW_SHARED=284;
	public static final int KW_SHOW=285;
	public static final int KW_SHOW_DATABASE=286;
	public static final int KW_SKEWED=287;
	public static final int KW_SMALLINT=288;
	public static final int KW_SNAPSHOT=289;
	public static final int KW_SORT=290;
	public static final int KW_SORTED=291;
	public static final int KW_SSL=292;
	public static final int KW_START=293;
	public static final int KW_STATISTICS=294;
	public static final int KW_STATUS=295;
	public static final int KW_STORED=296;
	public static final int KW_STREAMTABLE=297;
	public static final int KW_STRING=298;
	public static final int KW_STRUCT=299;
	public static final int KW_SUMMARY=300;
	public static final int KW_SYNC=301;
	public static final int KW_TABLE=302;
	public static final int KW_TABLES=303;
	public static final int KW_TABLESAMPLE=304;
	public static final int KW_TBLPROPERTIES=305;
	public static final int KW_TEMPORARY=306;
	public static final int KW_TERMINATED=307;
	public static final int KW_THEN=308;
	public static final int KW_TIME=309;
	public static final int KW_TIMESTAMP=310;
	public static final int KW_TIMESTAMPLOCALTZ=311;
	public static final int KW_TINYINT=312;
	public static final int KW_TO=313;
	public static final int KW_TOUCH=314;
	public static final int KW_TRANSACTION=315;
	public static final int KW_TRANSACTIONS=316;
	public static final int KW_TRANSFORM=317;
	public static final int KW_TRIGGER=318;
	public static final int KW_TRUE=319;
	public static final int KW_TRUNCATE=320;
	public static final int KW_UNARCHIVE=321;
	public static final int KW_UNBOUNDED=322;
	public static final int KW_UNDO=323;
	public static final int KW_UNION=324;
	public static final int KW_UNIONTYPE=325;
	public static final int KW_UNIQUE=326;
	public static final int KW_UNIQUEJOIN=327;
	public static final int KW_UNLOCK=328;
	public static final int KW_UNMANAGED=329;
	public static final int KW_UNSET=330;
	public static final int KW_UNSIGNED=331;
	public static final int KW_UPDATE=332;
	public static final int KW_URI=333;
	public static final int KW_USE=334;
	public static final int KW_USER=335;
	public static final int KW_USING=336;
	public static final int KW_UTC=337;
	public static final int KW_UTCTIMESTAMP=338;
	public static final int KW_VALIDATE=339;
	public static final int KW_VALUES=340;
	public static final int KW_VALUE_TYPE=341;
	public static final int KW_VARCHAR=342;
	public static final int KW_VECTORIZATION=343;
	public static final int KW_VIEW=344;
	public static final int KW_VIEWS=345;
	public static final int KW_WAIT=346;
	public static final int KW_WEEK=347;
	public static final int KW_WHEN=348;
	public static final int KW_WHERE=349;
	public static final int KW_WHILE=350;
	public static final int KW_WINDOW=351;
	public static final int KW_WITH=352;
	public static final int KW_WORK=353;
	public static final int KW_WORKLOAD=354;
	public static final int KW_WRITE=355;
	public static final int KW_YEAR=356;
	public static final int KW_ZONE=357;
	public static final int LCURLY=358;
	public static final int LESSTHAN=359;
	public static final int LESSTHANOREQUALTO=360;
	public static final int LINE_COMMENT=361;
	public static final int LPAREN=362;
	public static final int LSQUARE=363;
	public static final int Letter=364;
	public static final int MINUS=365;
	public static final int MOD=366;
	public static final int NOTEQUAL=367;
	public static final int Number=368;
	public static final int NumberLiteral=369;
	public static final int PLUS=370;
	public static final int QUERY_HINT=371;
	public static final int QUESTION=372;
	public static final int QuotedIdentifier=373;
	public static final int RCURLY=374;
	public static final int RPAREN=375;
	public static final int RSQUARE=376;
	public static final int RegexComponent=377;
	public static final int SEMICOLON=378;
	public static final int STAR=379;
	public static final int StringLiteral=380;
	public static final int TILDE=381;
	public static final int TimeFullLiteral=382;
	public static final int WS=383;
	public static final int KW_BATCH=420;
	public static final int KW_DAYOFWEEK=457;
	public static final int KW_HOLD_DDLTIME=504;
	public static final int KW_IGNORE=508;
	public static final int KW_NO_DROP=555;
	public static final int KW_OFFLINE=559;
	public static final int KW_PROTECTION=585;
	public static final int KW_READONLY=592;
	public static final int KW_TIMESTAMPTZ=652;
	public static final int TOK_ABORT_TRANSACTIONS=711;
	public static final int TOK_ACTIVATE=712;
	public static final int TOK_ADD_TRIGGER=713;
	public static final int TOK_ADMIN_OPTION_FOR=714;
	public static final int TOK_ALIASLIST=715;
	public static final int TOK_ALLCOLREF=716;
	public static final int TOK_ALLOC_FRACTION=717;
	public static final int TOK_ALTERDATABASE_LOCATION=718;
	public static final int TOK_ALTERDATABASE_OWNER=719;
	public static final int TOK_ALTERDATABASE_PROPERTIES=720;
	public static final int TOK_ALTERTABLE=721;
	public static final int TOK_ALTERTABLE_ADDCOLS=722;
	public static final int TOK_ALTERTABLE_ADDCONSTRAINT=723;
	public static final int TOK_ALTERTABLE_ADDPARTS=724;
	public static final int TOK_ALTERTABLE_ARCHIVE=725;
	public static final int TOK_ALTERTABLE_BUCKETS=726;
	public static final int TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION=727;
	public static final int TOK_ALTERTABLE_CLUSTER_SORT=728;
	public static final int TOK_ALTERTABLE_COMPACT=729;
	public static final int TOK_ALTERTABLE_DROPCONSTRAINT=730;
	public static final int TOK_ALTERTABLE_DROPPARTS=731;
	public static final int TOK_ALTERTABLE_DROPPROPERTIES=732;
	public static final int TOK_ALTERTABLE_EXCHANGEPARTITION=733;
	public static final int TOK_ALTERTABLE_FILEFORMAT=734;
	public static final int TOK_ALTERTABLE_LOCATION=735;
	public static final int TOK_ALTERTABLE_MERGEFILES=736;
	public static final int TOK_ALTERTABLE_OWNER=737;
	public static final int TOK_ALTERTABLE_PARTCOLTYPE=738;
	public static final int TOK_ALTERTABLE_PROPERTIES=739;
	public static final int TOK_ALTERTABLE_RENAME=740;
	public static final int TOK_ALTERTABLE_RENAMECOL=741;
	public static final int TOK_ALTERTABLE_RENAMEPART=742;
	public static final int TOK_ALTERTABLE_REPLACECOLS=743;
	public static final int TOK_ALTERTABLE_SERDEPROPERTIES=744;
	public static final int TOK_ALTERTABLE_SERIALIZER=745;
	public static final int TOK_ALTERTABLE_SKEWED=746;
	public static final int TOK_ALTERTABLE_SKEWED_LOCATION=747;
	public static final int TOK_ALTERTABLE_TOUCH=748;
	public static final int TOK_ALTERTABLE_UNARCHIVE=749;
	public static final int TOK_ALTERTABLE_UPDATECOLSTATS=750;
	public static final int TOK_ALTERTABLE_UPDATECOLUMNS=751;
	public static final int TOK_ALTERTABLE_UPDATESTATS=752;
	public static final int TOK_ALTERVIEW=753;
	public static final int TOK_ALTERVIEW_ADDPARTS=754;
	public static final int TOK_ALTERVIEW_DROPPARTS=755;
	public static final int TOK_ALTERVIEW_DROPPROPERTIES=756;
	public static final int TOK_ALTERVIEW_PROPERTIES=757;
	public static final int TOK_ALTERVIEW_RENAME=758;
	public static final int TOK_ALTER_MAPPING=759;
	public static final int TOK_ALTER_MATERIALIZED_VIEW=760;
	public static final int TOK_ALTER_MATERIALIZED_VIEW_REBUILD=761;
	public static final int TOK_ALTER_MATERIALIZED_VIEW_REWRITE=762;
	public static final int TOK_ALTER_POOL=763;
	public static final int TOK_ALTER_RP=764;
	public static final int TOK_ALTER_TRIGGER=765;
	public static final int TOK_ANALYZE=766;
	public static final int TOK_ARCHIVE=767;
	public static final int TOK_BIGINT=768;
	public static final int TOK_BINARY=769;
	public static final int TOK_BLOCKING=770;
	public static final int TOK_BOOLEAN=771;
	public static final int TOK_CACHE_METADATA=772;
	public static final int TOK_CASCADE=773;
	public static final int TOK_CHAR=774;
	public static final int TOK_CHARSETLITERAL=775;
	public static final int TOK_CHECK_CONSTRAINT=776;
	public static final int TOK_CLUSTERBY=777;
	public static final int TOK_COLTYPELIST=778;
	public static final int TOK_COL_NAME=779;
	public static final int TOK_COMMIT=780;
	public static final int TOK_CONSTRAINT_NAME=781;
	public static final int TOK_CREATEDATABASE=782;
	public static final int TOK_CREATEFUNCTION=783;
	public static final int TOK_CREATEMACRO=784;
	public static final int TOK_CREATEROLE=785;
	public static final int TOK_CREATETABLE=786;
	public static final int TOK_CREATEVIEW=787;
	public static final int TOK_CREATE_MAPPING=788;
	public static final int TOK_CREATE_MATERIALIZED_VIEW=789;
	public static final int TOK_CREATE_POOL=790;
	public static final int TOK_CREATE_RP=791;
	public static final int TOK_CREATE_TRIGGER=792;
	public static final int TOK_CROSSJOIN=793;
	public static final int TOK_CTE=794;
	public static final int TOK_CUBE_GROUPBY=795;
	public static final int TOK_DATABASECOMMENT=796;
	public static final int TOK_DATABASELOCATION=797;
	public static final int TOK_DATABASEPROPERTIES=798;
	public static final int TOK_DATE=799;
	public static final int TOK_DATELITERAL=800;
	public static final int TOK_DATETIME=801;
	public static final int TOK_DBNAME=802;
	public static final int TOK_DBPROPLIST=803;
	public static final int TOK_DB_TYPE=804;
	public static final int TOK_DECIMAL=805;
	public static final int TOK_DEFAULT_POOL=806;
	public static final int TOK_DEFAULT_VALUE=807;
	public static final int TOK_DELETE=808;
	public static final int TOK_DELETE_FROM=809;
	public static final int TOK_DESCDATABASE=810;
	public static final int TOK_DESCFUNCTION=811;
	public static final int TOK_DESCTABLE=812;
	public static final int TOK_DESTINATION=813;
	public static final int TOK_DETAIL=814;
	public static final int TOK_DIR=815;
	public static final int TOK_DISABLE=816;
	public static final int TOK_DISTRIBUTEBY=817;
	public static final int TOK_DOUBLE=818;
	public static final int TOK_DROPDATABASE=819;
	public static final int TOK_DROPFUNCTION=820;
	public static final int TOK_DROPMACRO=821;
	public static final int TOK_DROPROLE=822;
	public static final int TOK_DROPTABLE=823;
	public static final int TOK_DROPVIEW=824;
	public static final int TOK_DROP_MAPPING=825;
	public static final int TOK_DROP_MATERIALIZED_VIEW=826;
	public static final int TOK_DROP_POOL=827;
	public static final int TOK_DROP_RP=828;
	public static final int TOK_DROP_TRIGGER=829;
	public static final int TOK_ENABLE=830;
	public static final int TOK_EXCEPTALL=831;
	public static final int TOK_EXCEPTDISTINCT=832;
	public static final int TOK_EXPLAIN=833;
	public static final int TOK_EXPLAIN_SQ_REWRITE=834;
	public static final int TOK_EXPLIST=835;
	public static final int TOK_EXPORT=836;
	public static final int TOK_EXPRESSION=837;
	public static final int TOK_FALSE=838;
	public static final int TOK_FILE=839;
	public static final int TOK_FILEFORMAT_GENERIC=840;
	public static final int TOK_FLOAT=841;
	public static final int TOK_FOREIGN_KEY=842;
	public static final int TOK_FROM=843;
	public static final int TOK_FULLOUTERJOIN=844;
	public static final int TOK_FUNCTION=845;
	public static final int TOK_FUNCTIONDI=846;
	public static final int TOK_FUNCTIONSTAR=847;
	public static final int TOK_GRANT=848;
	public static final int TOK_GRANT_OPTION_FOR=849;
	public static final int TOK_GRANT_ROLE=850;
	public static final int TOK_GRANT_WITH_ADMIN_OPTION=851;
	public static final int TOK_GRANT_WITH_OPTION=852;
	public static final int TOK_GROUP=853;
	public static final int TOK_GROUPBY=854;
	public static final int TOK_GROUPING_SETS=855;
	public static final int TOK_GROUPING_SETS_EXPRESSION=856;
	public static final int TOK_HAVING=857;
	public static final int TOK_IFEXISTS=858;
	public static final int TOK_IFNOTEXISTS=859;
	public static final int TOK_IMPORT=860;
	public static final int TOK_INPUTFORMAT=861;
	public static final int TOK_INSERT=862;
	public static final int TOK_INSERT_INTO=863;
	public static final int TOK_INT=864;
	public static final int TOK_INTERSECTALL=865;
	public static final int TOK_INTERSECTDISTINCT=866;
	public static final int TOK_INTERVAL_DAY_LITERAL=867;
	public static final int TOK_INTERVAL_DAY_TIME=868;
	public static final int TOK_INTERVAL_DAY_TIME_LITERAL=869;
	public static final int TOK_INTERVAL_HOUR_LITERAL=870;
	public static final int TOK_INTERVAL_MINUTE_LITERAL=871;
	public static final int TOK_INTERVAL_MONTH_LITERAL=872;
	public static final int TOK_INTERVAL_SECOND_LITERAL=873;
	public static final int TOK_INTERVAL_YEAR_LITERAL=874;
	public static final int TOK_INTERVAL_YEAR_MONTH=875;
	public static final int TOK_INTERVAL_YEAR_MONTH_LITERAL=876;
	public static final int TOK_ISOLATION_LEVEL=877;
	public static final int TOK_ISOLATION_SNAPSHOT=878;
	public static final int TOK_JAR=879;
	public static final int TOK_JOIN=880;
	public static final int TOK_KILL_QUERY=881;
	public static final int TOK_LATERAL_VIEW=882;
	public static final int TOK_LATERAL_VIEW_OUTER=883;
	public static final int TOK_LEFTOUTERJOIN=884;
	public static final int TOK_LEFTSEMIJOIN=885;
	public static final int TOK_LENGTH=886;
	public static final int TOK_LIKERP=887;
	public static final int TOK_LIKETABLE=888;
	public static final int TOK_LIMIT=889;
	public static final int TOK_LIST=890;
	public static final int TOK_LOAD=891;
	public static final int TOK_LOCKDB=892;
	public static final int TOK_LOCKTABLE=893;
	public static final int TOK_MAP=894;
	public static final int TOK_MATCHED=895;
	public static final int TOK_MERGE=896;
	public static final int TOK_METADATA=897;
	public static final int TOK_MSCK=898;
	public static final int TOK_NORELY=899;
	public static final int TOK_NOT_CLUSTERED=900;
	public static final int TOK_NOT_MATCHED=901;
	public static final int TOK_NOT_NULL=902;
	public static final int TOK_NOT_SORTED=903;
	public static final int TOK_NOVALIDATE=904;
	public static final int TOK_NO_DROP=905;
	public static final int TOK_NULL=906;
	public static final int TOK_NULLS_FIRST=907;
	public static final int TOK_NULLS_LAST=908;
	public static final int TOK_OFFLINE=909;
	public static final int TOK_OFFSET=910;
	public static final int TOK_ONLY=911;
	public static final int TOK_OPERATOR=912;
	public static final int TOK_OP_ADD=913;
	public static final int TOK_OP_AND=914;
	public static final int TOK_OP_BITAND=915;
	public static final int TOK_OP_BITNOT=916;
	public static final int TOK_OP_BITOR=917;
	public static final int TOK_OP_BITXOR=918;
	public static final int TOK_OP_DIV=919;
	public static final int TOK_OP_EQ=920;
	public static final int TOK_OP_GE=921;
	public static final int TOK_OP_GT=922;
	public static final int TOK_OP_LE=923;
	public static final int TOK_OP_LIKE=924;
	public static final int TOK_OP_LT=925;
	public static final int TOK_OP_MOD=926;
	public static final int TOK_OP_MUL=927;
	public static final int TOK_OP_NE=928;
	public static final int TOK_OP_NOT=929;
	public static final int TOK_OP_OR=930;
	public static final int TOK_OP_SUB=931;
	public static final int TOK_ORDERBY=932;
	public static final int TOK_ORREPLACE=933;
	public static final int TOK_PARTITIONINGSPEC=934;
	public static final int TOK_PARTITIONLOCATION=935;
	public static final int TOK_PARTSPEC=936;
	public static final int TOK_PARTVAL=937;
	public static final int TOK_PATH=938;
	public static final int TOK_PERCENT=939;
	public static final int TOK_PRIMARY_KEY=940;
	public static final int TOK_PRINCIPAL_NAME=941;
	public static final int TOK_PRIVILEGE=942;
	public static final int TOK_PRIVILEGE_LIST=943;
	public static final int TOK_PRIV_ALL=944;
	public static final int TOK_PRIV_ALTER_DATA=945;
	public static final int TOK_PRIV_ALTER_METADATA=946;
	public static final int TOK_PRIV_CREATE=947;
	public static final int TOK_PRIV_DELETE=948;
	public static final int TOK_PRIV_DROP=949;
	public static final int TOK_PRIV_INSERT=950;
	public static final int TOK_PRIV_LOCK=951;
	public static final int TOK_PRIV_OBJECT=952;
	public static final int TOK_PRIV_OBJECT_COL=953;
	public static final int TOK_PRIV_SELECT=954;
	public static final int TOK_PRIV_SHOW_DATABASE=955;
	public static final int TOK_PTBLFUNCTION=956;
	public static final int TOK_QUERY=957;
	public static final int TOK_QUERY_PARALLELISM=958;
	public static final int TOK_READONLY=959;
	public static final int TOK_RECORDREADER=960;
	public static final int TOK_RECORDWRITER=961;
	public static final int TOK_RELOADFUNCTION=962;
	public static final int TOK_RELY=963;
	public static final int TOK_RENAME=964;
	public static final int TOK_REPLACE=965;
	public static final int TOK_REPLICATION=966;
	public static final int TOK_REPL_CONFIG=967;
	public static final int TOK_REPL_CONFIG_LIST=968;
	public static final int TOK_REPL_DUMP=969;
	public static final int TOK_REPL_LOAD=970;
	public static final int TOK_REPL_STATUS=971;
	public static final int TOK_RESOURCE_ALL=972;
	public static final int TOK_RESOURCE_LIST=973;
	public static final int TOK_RESOURCE_URI=974;
	public static final int TOK_RESTRICT=975;
	public static final int TOK_REVOKE=976;
	public static final int TOK_REVOKE_ROLE=977;
	public static final int TOK_REWRITE_DISABLED=978;
	public static final int TOK_REWRITE_ENABLED=979;
	public static final int TOK_RIGHTOUTERJOIN=980;
	public static final int TOK_ROLE=981;
	public static final int TOK_ROLLBACK=982;
	public static final int TOK_ROLLUP_GROUPBY=983;
	public static final int TOK_ROWCOUNT=984;
	public static final int TOK_SCHEDULING_POLICY=985;
	public static final int TOK_SELECT=986;
	public static final int TOK_SELECTDI=987;
	public static final int TOK_SELEXPR=988;
	public static final int TOK_SERDE=989;
	public static final int TOK_SERDENAME=990;
	public static final int TOK_SERDEPROPS=991;
	public static final int TOK_SERVER_TYPE=992;
	public static final int TOK_SETCOLREF=993;
	public static final int TOK_SET_AUTOCOMMIT=994;
	public static final int TOK_SET_COLUMNS_CLAUSE=995;
	public static final int TOK_SHOWCOLUMNS=996;
	public static final int TOK_SHOWCONF=997;
	public static final int TOK_SHOWDATABASES=998;
	public static final int TOK_SHOWDBLOCKS=999;
	public static final int TOK_SHOWFUNCTIONS=1000;
	public static final int TOK_SHOWLOCKS=1001;
	public static final int TOK_SHOWMATERIALIZEDVIEWS=1002;
	public static final int TOK_SHOWPARTITIONS=1003;
	public static final int TOK_SHOWTABLES=1004;
	public static final int TOK_SHOWVIEWS=1005;
	public static final int TOK_SHOW_COMPACTIONS=1006;
	public static final int TOK_SHOW_CREATEDATABASE=1007;
	public static final int TOK_SHOW_CREATETABLE=1008;
	public static final int TOK_SHOW_GRANT=1009;
	public static final int TOK_SHOW_ROLES=1010;
	public static final int TOK_SHOW_ROLE_GRANT=1011;
	public static final int TOK_SHOW_ROLE_PRINCIPALS=1012;
	public static final int TOK_SHOW_RP=1013;
	public static final int TOK_SHOW_SET_ROLE=1014;
	public static final int TOK_SHOW_TABLESTATUS=1015;
	public static final int TOK_SHOW_TBLPROPERTIES=1016;
	public static final int TOK_SHOW_TRANSACTIONS=1017;
	public static final int TOK_SKEWED_LOCATIONS=1018;
	public static final int TOK_SKEWED_LOCATION_LIST=1019;
	public static final int TOK_SKEWED_LOCATION_MAP=1020;
	public static final int TOK_SMALLINT=1021;
	public static final int TOK_SORTBY=1022;
	public static final int TOK_START_TRANSACTION=1023;
	public static final int TOK_STORAGEHANDLER=1024;
	public static final int TOK_STOREDASDIRS=1025;
	public static final int TOK_STRING=1026;
	public static final int TOK_STRINGLITERALSEQUENCE=1027;
	public static final int TOK_STRUCT=1028;
	public static final int TOK_SUBQUERY=1029;
	public static final int TOK_SUBQUERY_EXPR=1030;
	public static final int TOK_SUBQUERY_OP=1031;
	public static final int TOK_SUBQUERY_OP_NOTEXISTS=1032;
	public static final int TOK_SUBQUERY_OP_NOTIN=1033;
	public static final int TOK_SUMMARY=1034;
	public static final int TOK_SWITCHDATABASE=1035;
	public static final int TOK_TAB=1036;
	public static final int TOK_TABALIAS=1037;
	public static final int TOK_TABCOL=1038;
	public static final int TOK_TABCOLLIST=1039;
	public static final int TOK_TABCOLNAME=1040;
	public static final int TOK_TABCOLVALUE=1041;
	public static final int TOK_TABCOLVALUES=1042;
	public static final int TOK_TABCOLVALUE_PAIR=1043;
	public static final int TOK_TABLEBUCKETSAMPLE=1044;
	public static final int TOK_TABLECOMMENT=1045;
	public static final int TOK_TABLEFILEFORMAT=1046;
	public static final int TOK_TABLELOCATION=1047;
	public static final int TOK_TABLEPARTCOLS=1048;
	public static final int TOK_TABLEPROPERTIES=1049;
	public static final int TOK_TABLEPROPERTY=1050;
	public static final int TOK_TABLEPROPLIST=1051;
	public static final int TOK_TABLEROWFORMAT=1052;
	public static final int TOK_TABLEROWFORMATCOLLITEMS=1053;
	public static final int TOK_TABLEROWFORMATFIELD=1054;
	public static final int TOK_TABLEROWFORMATLINES=1055;
	public static final int TOK_TABLEROWFORMATMAPKEYS=1056;
	public static final int TOK_TABLEROWFORMATNULL=1057;
	public static final int TOK_TABLESERIALIZER=1058;
	public static final int TOK_TABLESKEWED=1059;
	public static final int TOK_TABLESPLITSAMPLE=1060;
	public static final int TOK_TABLE_OR_COL=1061;
	public static final int TOK_TABLE_PARTITION=1062;
	public static final int TOK_TABLE_TYPE=1063;
	public static final int TOK_TABNAME=1064;
	public static final int TOK_TABREF=1065;
	public static final int TOK_TABSORTCOLNAMEASC=1066;
	public static final int TOK_TABSORTCOLNAMEDESC=1067;
	public static final int TOK_TABSRC=1068;
	public static final int TOK_TABTYPE=1069;
	public static final int TOK_TEMPORARY=1070;
	public static final int TOK_TIMESTAMP=1071;
	public static final int TOK_TIMESTAMPLITERAL=1072;
	public static final int TOK_TIMESTAMPLOCALTZ=1073;
	public static final int TOK_TIMESTAMPLOCALTZLITERAL=1074;
	public static final int TOK_TINYINT=1075;
	public static final int TOK_TMP_FILE=1076;
	public static final int TOK_TO=1077;
	public static final int TOK_TRANSFORM=1078;
	public static final int TOK_TRIGGER_EXPRESSION=1079;
	public static final int TOK_TRUE=1080;
	public static final int TOK_TRUNCATETABLE=1081;
	public static final int TOK_TXN_ACCESS_MODE=1082;
	public static final int TOK_TXN_READ_ONLY=1083;
	public static final int TOK_TXN_READ_WRITE=1084;
	public static final int TOK_UNIONALL=1085;
	public static final int TOK_UNIONDISTINCT=1086;
	public static final int TOK_UNIONTYPE=1087;
	public static final int TOK_UNIQUE=1088;
	public static final int TOK_UNIQUEJOIN=1089;
	public static final int TOK_UNLOCKDB=1090;
	public static final int TOK_UNLOCKTABLE=1091;
	public static final int TOK_UNMANAGED=1092;
	public static final int TOK_UPDATE=1093;
	public static final int TOK_UPDATE_TABLE=1094;
	public static final int TOK_URI_TYPE=1095;
	public static final int TOK_USER=1096;
	public static final int TOK_USERSCRIPTCOLNAMES=1097;
	public static final int TOK_USERSCRIPTCOLSCHEMA=1098;
	public static final int TOK_VALIDATE=1099;
	public static final int TOK_VARCHAR=1100;
	public static final int TOK_VIEWPARTCOLS=1101;
	public static final int TOK_WHERE=1102;
	public static final int TOK_WINDOWDEF=1103;
	public static final int TOK_WINDOWRANGE=1104;
	public static final int TOK_WINDOWSPEC=1105;
	public static final int TOK_WINDOWVALUES=1106;

	// delegates
	public HiveParser_SelectClauseParser gSelectClauseParser;
	public HiveParser_FromClauseParser gFromClauseParser;
	public HiveParser_IdentifiersParser gIdentifiersParser;
	public HiveParser_ResourcePlanParser gResourcePlanParser;
	public Parser[] getDelegates() {
		return new Parser[] {gSelectClauseParser, gFromClauseParser, gIdentifiersParser, gResourcePlanParser};
	}

	// delegators


	public HiveParser(TokenStream input) {
		this(input, new RecognizerSharedState());
	}
	public HiveParser(TokenStream input, RecognizerSharedState state) {
		super(input, state);
		gSelectClauseParser = new HiveParser_SelectClauseParser(input, state, this);
		gFromClauseParser = new HiveParser_FromClauseParser(input, state, this);
		gIdentifiersParser = new HiveParser_IdentifiersParser(input, state, this);
		gResourcePlanParser = new HiveParser_ResourcePlanParser(input, state, this);
	}
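	// Callers construct only this composite parser; each delegate above shares
	// its TokenStream and RecognizerSharedState. A minimal usage sketch,
	// assuming the companion HiveLexer generated from the same grammar and an
	// ANTLR 3 runtime on the classpath (Hive's own driver additionally wraps
	// the character stream so keywords match case-insensitively):
	//
	//   CharStream cs = new ANTLRStringStream("SELECT key FROM src");
	//   CommonTokenStream tokens = new CommonTokenStream(new HiveLexer(cs));
	//   HiveParser parser = new HiveParser(tokens);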

	protected TreeAdaptor adaptor = new CommonTreeAdaptor();

	public void setTreeAdaptor(TreeAdaptor adaptor) {
		this.adaptor = adaptor;
		gSelectClauseParser.setTreeAdaptor(this.adaptor);
		gFromClauseParser.setTreeAdaptor(this.adaptor);
		gIdentifiersParser.setTreeAdaptor(this.adaptor);
		gResourcePlanParser.setTreeAdaptor(this.adaptor);
	}
	public TreeAdaptor getTreeAdaptor() {
		return adaptor;
	}
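	// The TreeAdaptor controls what node objects the parser builds as it
	// recognizes input; the default CommonTreeAdaptor yields plain CommonTree
	// nodes. Note that setTreeAdaptor also pushes the adaptor into every
	// delegate parser, so the whole composite builds one consistent tree.
	// Hive's own ParseDriver installs a custom adaptor here that creates
	// Hive ASTNode instances instead.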
	@Override public String[] getTokenNames() { return HiveParser.tokenNames; }
	@Override public String getGrammarFileName() { return "org/apache/hadoop/hive/ql/parse/HiveParser.g"; }


	  ArrayList&lt;ParseError&gt; errors = new ArrayList&lt;ParseError&gt;();
	  Stack&lt;String&gt; msgs = new Stack&lt;String&gt;();

	  private static HashMap&lt;String, String&gt; xlateMap;
	  static {
	    //this is used to support auto completion in CLI
	    xlateMap = new HashMap&lt;String, String&gt;();

	    // Keywords
	    xlateMap.put("KW_TRUE", "TRUE");
	    xlateMap.put("KW_FALSE", "FALSE");
	    xlateMap.put("KW_ALL", "ALL");
	    xlateMap.put("KW_NONE", "NONE");
	    xlateMap.put("KW_AND", "AND");
	    xlateMap.put("KW_OR", "OR");
	    xlateMap.put("KW_NOT", "NOT");
	    xlateMap.put("KW_LIKE", "LIKE");

	    xlateMap.put("KW_ASC", "ASC");
	    xlateMap.put("KW_DESC", "DESC");
	    xlateMap.put("KW_NULLS", "NULLS");
	    xlateMap.put("KW_LAST", "LAST");
	    xlateMap.put("KW_ORDER", "ORDER");
	    xlateMap.put("KW_BY", "BY");
	    xlateMap.put("KW_GROUP", "GROUP");
	    xlateMap.put("KW_WHERE", "WHERE");
	    xlateMap.put("KW_FROM", "FROM");
	    xlateMap.put("KW_AS", "AS");
	    xlateMap.put("KW_SELECT", "SELECT");
	    xlateMap.put("KW_DISTINCT", "DISTINCT");
	    xlateMap.put("KW_INSERT", "INSERT");
	    xlateMap.put("KW_OVERWRITE", "OVERWRITE");
	    xlateMap.put("KW_OUTER", "OUTER");
	    xlateMap.put("KW_JOIN", "JOIN");
	    xlateMap.put("KW_LEFT", "LEFT");
	    xlateMap.put("KW_RIGHT", "RIGHT");
	    xlateMap.put("KW_FULL", "FULL");
	    xlateMap.put("KW_ON", "ON");
	    xlateMap.put("KW_PARTITION", "PARTITION");
	    xlateMap.put("KW_PARTITIONS", "PARTITIONS");
	    xlateMap.put("KW_TABLE", "TABLE");
	    xlateMap.put("KW_TABLES", "TABLES");
	    xlateMap.put("KW_TBLPROPERTIES", "TBLPROPERTIES");
	    xlateMap.put("KW_SHOW", "SHOW");
	    xlateMap.put("KW_MSCK", "MSCK");
	    xlateMap.put("KW_DIRECTORY", "DIRECTORY");
	    xlateMap.put("KW_LOCAL", "LOCAL");
	    xlateMap.put("KW_TRANSFORM", "TRANSFORM");
	    xlateMap.put("KW_USING", "USING");
	    xlateMap.put("KW_CLUSTER", "CLUSTER");
	    xlateMap.put("KW_DISTRIBUTE", "DISTRIBUTE");
	    xlateMap.put("KW_SORT", "SORT");
	    xlateMap.put("KW_SYNC", "SYNC");
	    xlateMap.put("KW_UNION", "UNION");
	    xlateMap.put("KW_INTERSECT", "INTERSECT");
	    xlateMap.put("KW_EXCEPT", "EXCEPT");
	    xlateMap.put("KW_LOAD", "LOAD");
	    xlateMap.put("KW_DATA", "DATA");
	    xlateMap.put("KW_INPATH", "INPATH");
	    xlateMap.put("KW_IS", "IS");
	    xlateMap.put("KW_NULL", "NULL");
	    xlateMap.put("KW_CREATE", "CREATE");
	    xlateMap.put("KW_EXTERNAL", "EXTERNAL");
	    xlateMap.put("KW_ALTER", "ALTER");
	    xlateMap.put("KW_DESCRIBE", "DESCRIBE");
	    xlateMap.put("KW_DROP", "DROP");
	    xlateMap.put("KW_RENAME", "RENAME");
	    xlateMap.put("KW_TO", "TO");
	    xlateMap.put("KW_COMMENT", "COMMENT");
	    xlateMap.put("KW_BOOLEAN", "BOOLEAN");
	    xlateMap.put("KW_TINYINT", "TINYINT");
	    xlateMap.put("KW_SMALLINT", "SMALLINT");
	    xlateMap.put("KW_INT", "INT");
	    xlateMap.put("KW_BIGINT", "BIGINT");
	    xlateMap.put("KW_FLOAT", "FLOAT");
	    xlateMap.put("KW_DOUBLE", "DOUBLE");
	    xlateMap.put("KW_PRECISION", "PRECISION");
	    xlateMap.put("KW_DATE", "DATE");
	    xlateMap.put("KW_DATETIME", "DATETIME");
	    xlateMap.put("KW_TIMESTAMP", "TIMESTAMP");
	    xlateMap.put("KW_TIMESTAMPLOCALTZ", "TIMESTAMPLOCALTZ");
	    xlateMap.put("KW_TIME", "TIME");
	    xlateMap.put("KW_ZONE", "ZONE");
	    xlateMap.put("KW_STRING", "STRING");
	    xlateMap.put("KW_BINARY", "BINARY");
	    xlateMap.put("KW_ARRAY", "ARRAY");
	    xlateMap.put("KW_MAP", "MAP");
	    xlateMap.put("KW_REDUCE", "REDUCE");
	    xlateMap.put("KW_PARTITIONED", "PARTITIONED");
	    xlateMap.put("KW_CLUSTERED", "CLUSTERED");
	    xlateMap.put("KW_SORTED", "SORTED");
	    xlateMap.put("KW_INTO", "INTO");
	    xlateMap.put("KW_BUCKETS", "BUCKETS");
	    xlateMap.put("KW_ROW", "ROW");
	    xlateMap.put("KW_FORMAT", "FORMAT");
	    xlateMap.put("KW_DELIMITED", "DELIMITED");
	    xlateMap.put("KW_FIELDS", "FIELDS");
	    xlateMap.put("KW_TERMINATED", "TERMINATED");
	    xlateMap.put("KW_COLLECTION", "COLLECTION");
	    xlateMap.put("KW_ITEMS", "ITEMS");
	    xlateMap.put("KW_KEYS", "KEYS");
	    xlateMap.put("KW_KEY_TYPE", "$KEY$");
	    xlateMap.put("KW_LINES", "LINES");
	    xlateMap.put("KW_STORED", "STORED");
	    xlateMap.put("KW_SEQUENCEFILE", "SEQUENCEFILE");
	    xlateMap.put("KW_TEXTFILE", "TEXTFILE");
	    xlateMap.put("KW_INPUTFORMAT", "INPUTFORMAT");
	    xlateMap.put("KW_OUTPUTFORMAT", "OUTPUTFORMAT");
	    xlateMap.put("KW_LOCATION", "LOCATION");
	    xlateMap.put("KW_TABLESAMPLE", "TABLESAMPLE");
	    xlateMap.put("KW_BUCKET", "BUCKET");
	    xlateMap.put("KW_OUT", "OUT");
	    xlateMap.put("KW_OF", "OF");
	    xlateMap.put("KW_CAST", "CAST");
	    xlateMap.put("KW_ADD", "ADD");
	    xlateMap.put("KW_REPLACE", "REPLACE");
	    xlateMap.put("KW_COLUMNS", "COLUMNS");
	    xlateMap.put("KW_RLIKE", "RLIKE");
	    xlateMap.put("KW_REGEXP", "REGEXP");
	    xlateMap.put("KW_TEMPORARY", "TEMPORARY");
	    xlateMap.put("KW_FUNCTION", "FUNCTION");
	    xlateMap.put("KW_EXPLAIN", "EXPLAIN");
	    xlateMap.put("KW_EXTENDED", "EXTENDED");
	    xlateMap.put("KW_SERDE", "SERDE");
	    xlateMap.put("KW_WITH", "WITH");
	    xlateMap.put("KW_SERDEPROPERTIES", "SERDEPROPERTIES");
	    xlateMap.put("KW_LIMIT", "LIMIT");
	    xlateMap.put("KW_OFFSET", "OFFSET");
	    xlateMap.put("KW_SET", "SET");
	    xlateMap.put("KW_PROPERTIES", "TBLPROPERTIES");
	    xlateMap.put("KW_VALUE_TYPE", "$VALUE$");
	    xlateMap.put("KW_ELEM_TYPE", "$ELEM$");
	    xlateMap.put("KW_DEFINED", "DEFINED");
	    xlateMap.put("KW_SUBQUERY", "SUBQUERY");
	    xlateMap.put("KW_REWRITE", "REWRITE");
	    xlateMap.put("KW_UPDATE", "UPDATE");
	    xlateMap.put("KW_VALUES", "VALUES");
	    xlateMap.put("KW_PURGE", "PURGE");
	    xlateMap.put("KW_UNIQUE", "UNIQUE");
	    xlateMap.put("KW_PRIMARY", "PRIMARY");
	    xlateMap.put("KW_FOREIGN", "FOREIGN");
	    xlateMap.put("KW_KEY", "KEY");
	    xlateMap.put("KW_REFERENCES", "REFERENCES");
	    xlateMap.put("KW_CONSTRAINT", "CONSTRAINT");
	    xlateMap.put("KW_ENABLE", "ENABLE");
	    xlateMap.put("KW_DISABLE", "DISABLE");
	    xlateMap.put("KW_VALIDATE", "VALIDATE");
	    xlateMap.put("KW_NOVALIDATE", "NOVALIDATE");
	    xlateMap.put("KW_RELY", "RELY");
	    xlateMap.put("KW_NORELY", "NORELY");
	    xlateMap.put("KW_ABORT", "ABORT");
	    xlateMap.put("KW_TRANSACTIONS", "TRANSACTIONS");
	    xlateMap.put("KW_COMPACTIONS", "COMPACTIONS");
	    xlateMap.put("KW_COMPACT", "COMPACT");
	    xlateMap.put("KW_WAIT", "WAIT");
	    xlateMap.put("KW_KILL", "KILL");
	    xlateMap.put("KW_QUERY", "QUERY");
	    xlateMap.put("KW_RESOURCE", "RESOURCE");
	    xlateMap.put("KW_PLAN", "PLAN");
	    xlateMap.put("KW_QUERY_PARALLELISM", "QUERY_PARALLELISM");
	    xlateMap.put("KW_PLANS", "PLANS");
	    xlateMap.put("KW_ACTIVATE", "ACTIVATE");
	    xlateMap.put("KW_DEFAULT", "DEFAULT");
	    xlateMap.put("KW_CHECK", "CHECK");
	    xlateMap.put("KW_POOL", "POOL");
	    xlateMap.put("KW_MOVE", "MOVE");
	    xlateMap.put("KW_DO", "DO");
	    xlateMap.put("KW_ALLOC_FRACTION", "ALLOC_FRACTION");
	    xlateMap.put("KW_SCHEDULING_POLICY", "SCHEDULING_POLICY");
	    xlateMap.put("KW_PATH", "PATH");

	    // Operators
	    xlateMap.put("DOT", ".");
	    xlateMap.put("COLON", ":");
	    xlateMap.put("COMMA", ",");
	    xlateMap.put("SEMICOLON", ");");

	    xlateMap.put("LPAREN", "(");
	    xlateMap.put("RPAREN", ")");
	    xlateMap.put("LSQUARE", "[");
	    xlateMap.put("RSQUARE", "]");

	    xlateMap.put("EQUAL", "=");
	    xlateMap.put("NOTEQUAL", "<>");
	    xlateMap.put("EQUAL_NS", "<=>");
	    xlateMap.put("LESSTHANOREQUALTO", "<=");
	    xlateMap.put("LESSTHAN", "<");
	    xlateMap.put("GREATERTHANOREQUALTO", ">=");
	    xlateMap.put("GREATERTHAN", ">");

	    xlateMap.put("DIVIDE", "/");
	    xlateMap.put("PLUS", "+");
	    xlateMap.put("MINUS", "-");
	    xlateMap.put("STAR", "*");
	    xlateMap.put("MOD", "%");

	    xlateMap.put("AMPERSAND", "&");
	    xlateMap.put("TILDE", "~");
	    xlateMap.put("BITWISEOR", "|");
	    xlateMap.put("BITWISEXOR", "^");
	    xlateMap.put("CharSetLiteral", "\\'");
	  }

	  public static Collection<String> getKeywords() {
	    return xlateMap.values();
	  }

	  private static String xlate(String name) {

	    String ret = xlateMap.get(name);
	    if (ret == null) {
	      ret = name;
	    }

	    return ret;
	  }
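
	  /*
	   * Illustrative sketch (not emitted by ANTLR): xlate() falls back to the raw token
	   * name when no translation is registered, so callers never receive null.
	   *
	   *   xlate("KW_LOAD")      returns "LOAD"    (keyword mapping above)
	   *   xlate("GREATERTHAN")  returns ">"       (operator mapping above)
	   *   xlate("KW_FOO")       returns "KW_FOO"  (hypothetical, unmapped name passes through)
	   */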

	  @Override
	  public Object recoverFromMismatchedSet(IntStream input,
	      RecognitionException re, BitSet follow) throws RecognitionException {
	    throw re;
	  }

	  @Override
	  public void displayRecognitionError(String[] tokenNames,
	      RecognitionException e) {
	    errors.add(new ParseError(this, e, tokenNames));
	  }

	  @Override
	  public String getErrorHeader(RecognitionException e) {
	    String header = null;
	    if (e.charPositionInLine < 0 && input.LT(-1) != null) {
	      Token t = input.LT(-1);
	      header = "line " + t.getLine() + ":" + t.getCharPositionInLine();
	    } else {
	      header = super.getErrorHeader(e);
	    }

	    return header;
	  }
	  
	  @Override
	  public String getErrorMessage(RecognitionException e, String[] tokenNames) {
	    String msg = null;

	    // Translate the token names to something that the user can understand
	    String[] xlateNames = new String[tokenNames.length];
	    for (int i = 0; i < tokenNames.length; ++i) {
	      xlateNames[i] = HiveParser.xlate(tokenNames[i]);
	    }

	    if (e instanceof NoViableAltException) {
	      @SuppressWarnings("unused")
	      NoViableAltException nvae = (NoViableAltException) e;
	      // For development, one can also append
	      //   "decision=<<" + nvae.grammarDecisionDescription + ">>",
	      //   "(decision=" + nvae.decisionNumber + ")" and
	      //   "state " + nvae.stateNumber
	      msg = "cannot recognize input near"
	              + (input.LT(1) != null ? " " + getTokenErrorDisplay(input.LT(1)) : "")
	              + (input.LT(2) != null ? " " + getTokenErrorDisplay(input.LT(2)) : "")
	              + (input.LT(3) != null ? " " + getTokenErrorDisplay(input.LT(3)) : "");
	    } else if (e instanceof MismatchedTokenException) {
	      msg = super.getErrorMessage(e, xlateNames)
	              + (input.LT(-1) == null ? "" : " near '" + input.LT(-1).getText() + "'");
	    } else if (e instanceof FailedPredicateException) {
	      FailedPredicateException fpe = (FailedPredicateException) e;
	      msg = "Failed to recognize predicate '" + fpe.token.getText() + "'. Failed rule: '" + fpe.ruleName + "'";
	    } else {
	      msg = super.getErrorMessage(e, xlateNames);
	    }

	    if (msgs.size() > 0) {
	      msg = msg + " in " + msgs.peek();
	    }
	    return msg;
	  }
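
	  /*
	   * Hedged example (illustrative only): with the xlateNames translation applied, a
	   * MismatchedTokenException over input such as "LOAD DATA INPATH" missing its path
	   * would render with the user-facing names LOAD/DATA/INPATH instead of the raw
	   * KW_LOAD/KW_DATA/KW_INPATH token names, plus a trailing " near '...'" hint and,
	   * when pushMsg() context is active, an " in load statement" suffix.
	   */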
	  
	  public void pushMsg(String msg, RecognizerSharedState state) {
	    // ANTLR generated code does not wrap the @init code with this backtracking check,
	    // even if the matching @after has it. If we have parser rules that are doing
	    // some lookahead with syntactic predicates, this can cause the push() and pop()
	    // calls to become unbalanced, so make sure both push/pop check the backtracking state.
	    if (state.backtracking == 0) {
	      msgs.push(msg);
	    }
	  }

	  public void popMsg(RecognizerSharedState state) {
	    if (state.backtracking == 0) {
	      msgs.pop();
	    }
	  }
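
	  /*
	   * Usage sketch (mirrors the generated rule actions below): every rule brackets its
	   * body so error messages gain context, and both calls check state.backtracking to
	   * stay balanced across syntactic-predicate lookahead.
	   *
	   *   pushMsg("load statement", state);   // @init
	   *   ...                                 // rule body
	   *   popMsg(state);                      // @after
	   */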

	  // counter to generate unique union aliases
	  private int aliasCounter;
	  private String generateUnionAlias() {
	    return "_u" + (++aliasCounter);
	  }
	  private char[] excludedCharForColumnName = {'.', ':'};
	  private boolean containExcludedCharForCreateTableColumnName(String input) {
	    for (char c : excludedCharForColumnName) {
	      if (input.indexOf(c) > -1) {
	        return true;
	      }
	    }
	    return false;
	  }
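	  /*
	   * e.g. containExcludedCharForCreateTableColumnName("a.b") returns true ('.' is excluded),
	   *      containExcludedCharForCreateTableColumnName("ab")  returns false
	   */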
	  private CommonTree throwSetOpException() throws RecognitionException {
	    throw new FailedPredicateException(input, "orderByClause clusterByClause distributeByClause sortByClause limitClause can only be applied to the whole union.", "");
	  }
	  private CommonTree throwColumnNameException() throws RecognitionException {
	    throw new FailedPredicateException(input, Arrays.toString(excludedCharForColumnName) + " cannot be used in a column name in a CREATE TABLE statement.", "");
	  }
	  private Configuration hiveConf;
	  public void setHiveConf(Configuration hiveConf) {
	    this.hiveConf = hiveConf;
	  }


	public static class statement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "statement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:772:1: statement : ( explainStatement EOF | execStatement EOF );
	public final HiveParser.statement_return statement() throws RecognitionException {
		HiveParser.statement_return retval = new HiveParser.statement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token EOF2=null;
		Token EOF4=null;
		ParserRuleReturnScope explainStatement1 =null;
		ParserRuleReturnScope execStatement3 =null;

		ASTNode EOF2_tree=null;
		ASTNode EOF4_tree=null;

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:773:2: ( explainStatement EOF | execStatement EOF )
			int alt1=2;
			int LA1_0 = input.LA(1);
			if ( (LA1_0==KW_EXPLAIN) ) {
				alt1=1;
			}
			else if ( (LA1_0==KW_ABORT||(LA1_0 >= KW_ALTER && LA1_0 <= KW_ANALYZE)||LA1_0==KW_COMMIT||LA1_0==KW_CREATE||LA1_0==KW_DELETE||(LA1_0 >= KW_DESC && LA1_0 <= KW_DESCRIBE)||LA1_0==KW_DISABLE||LA1_0==KW_DROP||LA1_0==KW_ENABLE||LA1_0==KW_EXPORT||LA1_0==KW_FROM||LA1_0==KW_GRANT||LA1_0==KW_IMPORT||LA1_0==KW_INSERT||LA1_0==KW_KILL||LA1_0==KW_LOAD||LA1_0==KW_LOCK||LA1_0==KW_MAP||LA1_0==KW_MERGE||LA1_0==KW_MSCK||LA1_0==KW_REDUCE||LA1_0==KW_RELOAD||(LA1_0 >= KW_REPL && LA1_0 <= KW_REPLACE)||LA1_0==KW_REVOKE||LA1_0==KW_ROLLBACK||LA1_0==KW_SELECT||LA1_0==KW_SET||LA1_0==KW_SHOW||LA1_0==KW_START||LA1_0==KW_TRUNCATE||LA1_0==KW_UNLOCK||LA1_0==KW_UPDATE||LA1_0==KW_USE||LA1_0==KW_WITH||LA1_0==LPAREN) ) {
				alt1=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 1, 0, input);
				throw nvae;
			}

			switch (alt1) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:773:4: explainStatement EOF
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_explainStatement_in_statement1277);
					explainStatement1=explainStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, explainStatement1.getTree());

					EOF2=(Token)match(input,EOF,FOLLOW_EOF_in_statement1279); if (state.failed) return retval;
					if ( state.backtracking==0 ) {
					EOF2_tree = (ASTNode)adaptor.create(EOF2);
					adaptor.addChild(root_0, EOF2_tree);
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:774:4: execStatement EOF
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_execStatement_in_statement1284);
					execStatement3=execStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, execStatement3.getTree());

					EOF4=(Token)match(input,EOF,FOLLOW_EOF_in_statement1286); if (state.failed) return retval;
					if ( state.backtracking==0 ) {
					EOF4_tree = (ASTNode)adaptor.create(EOF4);
					adaptor.addChild(root_0, EOF4_tree);
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
		 reportError(e);
		  throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "statement"


	public static class explainStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "explainStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:777:1: explainStatement : KW_EXPLAIN ( ( explainOption )* execStatement -> ^( TOK_EXPLAIN execStatement ( explainOption )* ) | KW_REWRITE queryStatementExpression -> ^( TOK_EXPLAIN_SQ_REWRITE queryStatementExpression ) ) ;
	public final HiveParser.explainStatement_return explainStatement() throws RecognitionException {
		HiveParser.explainStatement_return retval = new HiveParser.explainStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_EXPLAIN5=null;
		Token KW_REWRITE8=null;
		ParserRuleReturnScope explainOption6 =null;
		ParserRuleReturnScope execStatement7 =null;
		ParserRuleReturnScope queryStatementExpression9 =null;

		ASTNode KW_EXPLAIN5_tree=null;
		ASTNode KW_REWRITE8_tree=null;
		RewriteRuleTokenStream stream_KW_REWRITE=new RewriteRuleTokenStream(adaptor,"token KW_REWRITE");
		RewriteRuleTokenStream stream_KW_EXPLAIN=new RewriteRuleTokenStream(adaptor,"token KW_EXPLAIN");
		RewriteRuleSubtreeStream stream_queryStatementExpression=new RewriteRuleSubtreeStream(adaptor,"rule queryStatementExpression");
		RewriteRuleSubtreeStream stream_explainOption=new RewriteRuleSubtreeStream(adaptor,"rule explainOption");
		RewriteRuleSubtreeStream stream_execStatement=new RewriteRuleSubtreeStream(adaptor,"rule execStatement");

		 pushMsg("explain statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:780:2: ( KW_EXPLAIN ( ( explainOption )* execStatement -> ^( TOK_EXPLAIN execStatement ( explainOption )* ) | KW_REWRITE queryStatementExpression -> ^( TOK_EXPLAIN_SQ_REWRITE queryStatementExpression ) ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:780:4: KW_EXPLAIN ( ( explainOption )* execStatement -> ^( TOK_EXPLAIN execStatement ( explainOption )* ) | KW_REWRITE queryStatementExpression -> ^( TOK_EXPLAIN_SQ_REWRITE queryStatementExpression ) )
			{
			KW_EXPLAIN5=(Token)match(input,KW_EXPLAIN,FOLLOW_KW_EXPLAIN_in_explainStatement1307); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_EXPLAIN.add(KW_EXPLAIN5);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:780:15: ( ( explainOption )* execStatement -> ^( TOK_EXPLAIN execStatement ( explainOption )* ) | KW_REWRITE queryStatementExpression -> ^( TOK_EXPLAIN_SQ_REWRITE queryStatementExpression ) )
			int alt3=2;
			int LA3_0 = input.LA(1);
			if ( (LA3_0==KW_ABORT||(LA3_0 >= KW_ALTER && LA3_0 <= KW_ANALYZE)||LA3_0==KW_AUTHORIZATION||LA3_0==KW_COMMIT||LA3_0==KW_CREATE||LA3_0==KW_DELETE||(LA3_0 >= KW_DEPENDENCY && LA3_0 <= KW_DESCRIBE)||LA3_0==KW_DISABLE||LA3_0==KW_DROP||LA3_0==KW_ENABLE||LA3_0==KW_EXPORT||LA3_0==KW_EXTENDED||(LA3_0 >= KW_FORMATTED && LA3_0 <= KW_FROM)||LA3_0==KW_GRANT||LA3_0==KW_IMPORT||LA3_0==KW_INSERT||LA3_0==KW_KILL||LA3_0==KW_LOAD||LA3_0==KW_LOCK||LA3_0==KW_LOGICAL||LA3_0==KW_MAP||LA3_0==KW_MERGE||LA3_0==KW_MSCK||LA3_0==KW_REDUCE||LA3_0==KW_RELOAD||LA3_0==KW_REOPTIMIZATION||(LA3_0 >= KW_REPL && LA3_0 <= KW_REPLACE)||LA3_0==KW_REVOKE||LA3_0==KW_ROLLBACK||LA3_0==KW_SELECT||LA3_0==KW_SET||LA3_0==KW_SHOW||LA3_0==KW_START||LA3_0==KW_TRUNCATE||LA3_0==KW_UNLOCK||LA3_0==KW_UPDATE||LA3_0==KW_USE||LA3_0==KW_VECTORIZATION||LA3_0==KW_WITH||LA3_0==LPAREN) ) {
				alt3=1;
			}
			else if ( (LA3_0==KW_REWRITE) ) {
				alt3=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 3, 0, input);
				throw nvae;
			}

			switch (alt3) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:781:6: ( explainOption )* execStatement
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:781:6: ( explainOption )*
					loop2:
					while (true) {
						int alt2=2;
						alt2 = dfa2.predict(input);
						switch (alt2) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:781:6: explainOption
							{
							pushFollow(FOLLOW_explainOption_in_explainStatement1316);
							explainOption6=explainOption();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_explainOption.add(explainOption6.getTree());
							}
							break;

						default :
							break loop2;
						}
					}

					pushFollow(FOLLOW_execStatement_in_explainStatement1319);
					execStatement7=execStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_execStatement.add(execStatement7.getTree());
					// AST REWRITE
					// elements: explainOption, execStatement
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 781:35: -> ^( TOK_EXPLAIN execStatement ( explainOption )* )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:781:38: ^( TOK_EXPLAIN execStatement ( explainOption )* )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_EXPLAIN, "TOK_EXPLAIN"), root_1);
						adaptor.addChild(root_1, stream_execStatement.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:781:66: ( explainOption )*
						while ( stream_explainOption.hasNext() ) {
							adaptor.addChild(root_1, stream_explainOption.nextTree());
						}
						stream_explainOption.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:783:9: KW_REWRITE queryStatementExpression
					{
					KW_REWRITE8=(Token)match(input,KW_REWRITE,FOLLOW_KW_REWRITE_in_explainStatement1350); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_REWRITE.add(KW_REWRITE8);

					pushFollow(FOLLOW_queryStatementExpression_in_explainStatement1352);
					queryStatementExpression9=queryStatementExpression();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_queryStatementExpression.add(queryStatementExpression9.getTree());
					// AST REWRITE
					// elements: queryStatementExpression
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 783:45: -> ^( TOK_EXPLAIN_SQ_REWRITE queryStatementExpression )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:783:48: ^( TOK_EXPLAIN_SQ_REWRITE queryStatementExpression )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_EXPLAIN_SQ_REWRITE, "TOK_EXPLAIN_SQ_REWRITE"), root_1);
						adaptor.addChild(root_1, stream_queryStatementExpression.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
		 reportError(e);
		  throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "explainStatement"


	public static class explainOption_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "explainOption"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:787:1: explainOption : ( KW_EXTENDED | KW_FORMATTED | KW_DEPENDENCY | KW_LOGICAL | KW_AUTHORIZATION | KW_ANALYZE | KW_REOPTIMIZATION | ( KW_VECTORIZATION ( vectorizationOnly )? ( vectorizatonDetail )? ) );
	public final HiveParser.explainOption_return explainOption() throws RecognitionException {
		HiveParser.explainOption_return retval = new HiveParser.explainOption_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_EXTENDED10=null;
		Token KW_FORMATTED11=null;
		Token KW_DEPENDENCY12=null;
		Token KW_LOGICAL13=null;
		Token KW_AUTHORIZATION14=null;
		Token KW_ANALYZE15=null;
		Token KW_REOPTIMIZATION16=null;
		Token KW_VECTORIZATION17=null;
		ParserRuleReturnScope vectorizationOnly18 =null;
		ParserRuleReturnScope vectorizatonDetail19 =null;

		ASTNode KW_EXTENDED10_tree=null;
		ASTNode KW_FORMATTED11_tree=null;
		ASTNode KW_DEPENDENCY12_tree=null;
		ASTNode KW_LOGICAL13_tree=null;
		ASTNode KW_AUTHORIZATION14_tree=null;
		ASTNode KW_ANALYZE15_tree=null;
		ASTNode KW_REOPTIMIZATION16_tree=null;
		ASTNode KW_VECTORIZATION17_tree=null;

		 msgs.push("explain option"); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:790:5: ( KW_EXTENDED | KW_FORMATTED | KW_DEPENDENCY | KW_LOGICAL | KW_AUTHORIZATION | KW_ANALYZE | KW_REOPTIMIZATION | ( KW_VECTORIZATION ( vectorizationOnly )? ( vectorizatonDetail )? ) )
			int alt6=8;
			switch ( input.LA(1) ) {
			case KW_EXTENDED:
				{
				alt6=1;
				}
				break;
			case KW_FORMATTED:
				{
				alt6=2;
				}
				break;
			case KW_DEPENDENCY:
				{
				alt6=3;
				}
				break;
			case KW_LOGICAL:
				{
				alt6=4;
				}
				break;
			case KW_AUTHORIZATION:
				{
				alt6=5;
				}
				break;
			case KW_ANALYZE:
				{
				alt6=6;
				}
				break;
			case KW_REOPTIMIZATION:
				{
				alt6=7;
				}
				break;
			case KW_VECTORIZATION:
				{
				alt6=8;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 6, 0, input);
				throw nvae;
			}
			switch (alt6) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:790:7: KW_EXTENDED
					{
					root_0 = (ASTNode)adaptor.nil();


					KW_EXTENDED10=(Token)match(input,KW_EXTENDED,FOLLOW_KW_EXTENDED_in_explainOption1392); if (state.failed) return retval;
					if ( state.backtracking==0 ) {
					KW_EXTENDED10_tree = (ASTNode)adaptor.create(KW_EXTENDED10);
					adaptor.addChild(root_0, KW_EXTENDED10_tree);
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:791:7: KW_FORMATTED
					{
					root_0 = (ASTNode)adaptor.nil();


					KW_FORMATTED11=(Token)match(input,KW_FORMATTED,FOLLOW_KW_FORMATTED_in_explainOption1400); if (state.failed) return retval;
					if ( state.backtracking==0 ) {
					KW_FORMATTED11_tree = (ASTNode)adaptor.create(KW_FORMATTED11);
					adaptor.addChild(root_0, KW_FORMATTED11_tree);
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:792:7: KW_DEPENDENCY
					{
					root_0 = (ASTNode)adaptor.nil();


					KW_DEPENDENCY12=(Token)match(input,KW_DEPENDENCY,FOLLOW_KW_DEPENDENCY_in_explainOption1408); if (state.failed) return retval;
					if ( state.backtracking==0 ) {
					KW_DEPENDENCY12_tree = (ASTNode)adaptor.create(KW_DEPENDENCY12);
					adaptor.addChild(root_0, KW_DEPENDENCY12_tree);
					}

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:793:7: KW_LOGICAL
					{
					root_0 = (ASTNode)adaptor.nil();


					KW_LOGICAL13=(Token)match(input,KW_LOGICAL,FOLLOW_KW_LOGICAL_in_explainOption1416); if (state.failed) return retval;
					if ( state.backtracking==0 ) {
					KW_LOGICAL13_tree = (ASTNode)adaptor.create(KW_LOGICAL13);
					adaptor.addChild(root_0, KW_LOGICAL13_tree);
					}

					}
					break;
				case 5 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:794:7: KW_AUTHORIZATION
					{
					root_0 = (ASTNode)adaptor.nil();


					KW_AUTHORIZATION14=(Token)match(input,KW_AUTHORIZATION,FOLLOW_KW_AUTHORIZATION_in_explainOption1424); if (state.failed) return retval;
					if ( state.backtracking==0 ) {
					KW_AUTHORIZATION14_tree = (ASTNode)adaptor.create(KW_AUTHORIZATION14);
					adaptor.addChild(root_0, KW_AUTHORIZATION14_tree);
					}

					}
					break;
				case 6 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:795:7: KW_ANALYZE
					{
					root_0 = (ASTNode)adaptor.nil();


					KW_ANALYZE15=(Token)match(input,KW_ANALYZE,FOLLOW_KW_ANALYZE_in_explainOption1432); if (state.failed) return retval;
					if ( state.backtracking==0 ) {
					KW_ANALYZE15_tree = (ASTNode)adaptor.create(KW_ANALYZE15);
					adaptor.addChild(root_0, KW_ANALYZE15_tree);
					}

					}
					break;
				case 7 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:796:7: KW_REOPTIMIZATION
					{
					root_0 = (ASTNode)adaptor.nil();


					KW_REOPTIMIZATION16=(Token)match(input,KW_REOPTIMIZATION,FOLLOW_KW_REOPTIMIZATION_in_explainOption1440); if (state.failed) return retval;
					if ( state.backtracking==0 ) {
					KW_REOPTIMIZATION16_tree = (ASTNode)adaptor.create(KW_REOPTIMIZATION16);
					adaptor.addChild(root_0, KW_REOPTIMIZATION16_tree);
					}

					}
					break;
				case 8 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:797:7: ( KW_VECTORIZATION ( vectorizationOnly )? ( vectorizatonDetail )? )
					{
					root_0 = (ASTNode)adaptor.nil();


					// org/apache/hadoop/hive/ql/parse/HiveParser.g:797:7: ( KW_VECTORIZATION ( vectorizationOnly )? ( vectorizatonDetail )? )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:797:8: KW_VECTORIZATION ( vectorizationOnly )? ( vectorizatonDetail )?
					{
					KW_VECTORIZATION17=(Token)match(input,KW_VECTORIZATION,FOLLOW_KW_VECTORIZATION_in_explainOption1449); if (state.failed) return retval;
					if ( state.backtracking==0 ) {
					KW_VECTORIZATION17_tree = (ASTNode)adaptor.create(KW_VECTORIZATION17);
					adaptor.addChild(root_0, KW_VECTORIZATION17_tree);
					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:797:25: ( vectorizationOnly )?
					int alt4=2;
					int LA4_0 = input.LA(1);
					if ( (LA4_0==KW_ONLY) ) {
						alt4=1;
					}
					switch (alt4) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:797:25: vectorizationOnly
							{
							pushFollow(FOLLOW_vectorizationOnly_in_explainOption1451);
							vectorizationOnly18=vectorizationOnly();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) adaptor.addChild(root_0, vectorizationOnly18.getTree());

							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:797:44: ( vectorizatonDetail )?
					int alt5=2;
					int LA5_0 = input.LA(1);
					if ( (LA5_0==KW_DETAIL||LA5_0==KW_EXPRESSION||LA5_0==KW_OPERATOR||LA5_0==KW_SUMMARY) ) {
						alt5=1;
					}
					switch (alt5) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:797:44: vectorizatonDetail
							{
							pushFollow(FOLLOW_vectorizatonDetail_in_explainOption1454);
							vectorizatonDetail19=vectorizatonDetail();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) adaptor.addChild(root_0, vectorizatonDetail19.getTree());

							}
							break;

					}

					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
		 reportError(e);
		  throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "explainOption"


	public static class vectorizationOnly_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "vectorizationOnly"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:800:1: vectorizationOnly : KW_ONLY -> ^( TOK_ONLY ) ;
	public final HiveParser.vectorizationOnly_return vectorizationOnly() throws RecognitionException {
		HiveParser.vectorizationOnly_return retval = new HiveParser.vectorizationOnly_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ONLY20=null;

		ASTNode KW_ONLY20_tree=null;
		RewriteRuleTokenStream stream_KW_ONLY=new RewriteRuleTokenStream(adaptor,"token KW_ONLY");

		 pushMsg("vectorization's only clause", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:803:5: ( KW_ONLY -> ^( TOK_ONLY ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:803:7: KW_ONLY
			{
			KW_ONLY20=(Token)match(input,KW_ONLY,FOLLOW_KW_ONLY_in_vectorizationOnly1483); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ONLY.add(KW_ONLY20);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 804:5: -> ^( TOK_ONLY )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:804:8: ^( TOK_ONLY )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ONLY, "TOK_ONLY"), root_1);
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
		 reportError(e);
		  throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "vectorizationOnly"


	public static class vectorizatonDetail_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "vectorizatonDetail"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:807:1: vectorizatonDetail : ( KW_SUMMARY -> ^( TOK_SUMMARY ) | KW_OPERATOR -> ^( TOK_OPERATOR ) | KW_EXPRESSION -> ^( TOK_EXPRESSION ) | KW_DETAIL -> ^( TOK_DETAIL ) );
	public final HiveParser.vectorizatonDetail_return vectorizatonDetail() throws RecognitionException {
		HiveParser.vectorizatonDetail_return retval = new HiveParser.vectorizatonDetail_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SUMMARY21=null;
		Token KW_OPERATOR22=null;
		Token KW_EXPRESSION23=null;
		Token KW_DETAIL24=null;

		ASTNode KW_SUMMARY21_tree=null;
		ASTNode KW_OPERATOR22_tree=null;
		ASTNode KW_EXPRESSION23_tree=null;
		ASTNode KW_DETAIL24_tree=null;
		RewriteRuleTokenStream stream_KW_SUMMARY=new RewriteRuleTokenStream(adaptor,"token KW_SUMMARY");
		RewriteRuleTokenStream stream_KW_DETAIL=new RewriteRuleTokenStream(adaptor,"token KW_DETAIL");
		RewriteRuleTokenStream stream_KW_OPERATOR=new RewriteRuleTokenStream(adaptor,"token KW_OPERATOR");
		RewriteRuleTokenStream stream_KW_EXPRESSION=new RewriteRuleTokenStream(adaptor,"token KW_EXPRESSION");

		 pushMsg("vectorization's detail level clause", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:810:5: ( KW_SUMMARY -> ^( TOK_SUMMARY ) | KW_OPERATOR -> ^( TOK_OPERATOR ) | KW_EXPRESSION -> ^( TOK_EXPRESSION ) | KW_DETAIL -> ^( TOK_DETAIL ) )
			int alt7=4;
			switch ( input.LA(1) ) {
			case KW_SUMMARY:
				{
				alt7=1;
				}
				break;
			case KW_OPERATOR:
				{
				alt7=2;
				}
				break;
			case KW_EXPRESSION:
				{
				alt7=3;
				}
				break;
			case KW_DETAIL:
				{
				alt7=4;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 7, 0, input);
				throw nvae;
			}
			switch (alt7) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:810:7: KW_SUMMARY
					{
					KW_SUMMARY21=(Token)match(input,KW_SUMMARY,FOLLOW_KW_SUMMARY_in_vectorizatonDetail1520); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SUMMARY.add(KW_SUMMARY21);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 811:5: -> ^( TOK_SUMMARY )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:811:8: ^( TOK_SUMMARY )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SUMMARY, "TOK_SUMMARY"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:812:7: KW_OPERATOR
					{
					KW_OPERATOR22=(Token)match(input,KW_OPERATOR,FOLLOW_KW_OPERATOR_in_vectorizatonDetail1538); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_OPERATOR.add(KW_OPERATOR22);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 813:5: -> ^( TOK_OPERATOR )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:813:8: ^( TOK_OPERATOR )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_OPERATOR, "TOK_OPERATOR"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:814:7: KW_EXPRESSION
					{
					KW_EXPRESSION23=(Token)match(input,KW_EXPRESSION,FOLLOW_KW_EXPRESSION_in_vectorizatonDetail1556); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_EXPRESSION.add(KW_EXPRESSION23);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 815:5: -> ^( TOK_EXPRESSION )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:815:8: ^( TOK_EXPRESSION )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_EXPRESSION, "TOK_EXPRESSION"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:816:7: KW_DETAIL
					{
					KW_DETAIL24=(Token)match(input,KW_DETAIL,FOLLOW_KW_DETAIL_in_vectorizatonDetail1574); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DETAIL.add(KW_DETAIL24);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 817:5: -> ^( TOK_DETAIL )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:817:8: ^( TOK_DETAIL )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DETAIL, "TOK_DETAIL"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
		 reportError(e);
		  throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "vectorizatonDetail"


	public static class execStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "execStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:820:1: execStatement : ( queryStatementExpression | loadStatement | exportStatement | importStatement | replDumpStatement | replLoadStatement | replStatusStatement | ddlStatement | deleteStatement | updateStatement | sqlTransactionStatement | mergeStatement );
	public final HiveParser.execStatement_return execStatement() throws RecognitionException {
		HiveParser.execStatement_return retval = new HiveParser.execStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope queryStatementExpression25 =null;
		ParserRuleReturnScope loadStatement26 =null;
		ParserRuleReturnScope exportStatement27 =null;
		ParserRuleReturnScope importStatement28 =null;
		ParserRuleReturnScope replDumpStatement29 =null;
		ParserRuleReturnScope replLoadStatement30 =null;
		ParserRuleReturnScope replStatusStatement31 =null;
		ParserRuleReturnScope ddlStatement32 =null;
		ParserRuleReturnScope deleteStatement33 =null;
		ParserRuleReturnScope updateStatement34 =null;
		ParserRuleReturnScope sqlTransactionStatement35 =null;
		ParserRuleReturnScope mergeStatement36 =null;


		 pushMsg("statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:823:5: ( queryStatementExpression | loadStatement | exportStatement | importStatement | replDumpStatement | replLoadStatement | replStatusStatement | ddlStatement | deleteStatement | updateStatement | sqlTransactionStatement | mergeStatement )
			int alt8=12;
			switch ( input.LA(1) ) {
			case KW_FROM:
			case KW_INSERT:
			case KW_MAP:
			case KW_REDUCE:
			case KW_SELECT:
			case KW_WITH:
			case LPAREN:
				{
				alt8=1;
				}
				break;
			case KW_LOAD:
				{
				alt8=2;
				}
				break;
			case KW_EXPORT:
				{
				alt8=3;
				}
				break;
			case KW_IMPORT:
				{
				alt8=4;
				}
				break;
			case KW_REPL:
				{
				switch ( input.LA(2) ) {
				case KW_DUMP:
					{
					alt8=5;
					}
					break;
				case KW_LOAD:
					{
					alt8=6;
					}
					break;
				case KW_STATUS:
					{
					alt8=7;
					}
					break;
				default:
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 8, 11, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}
				}
				break;
			case KW_ABORT:
			case KW_ALTER:
			case KW_ANALYZE:
			case KW_CREATE:
			case KW_DESC:
			case KW_DESCRIBE:
			case KW_DISABLE:
			case KW_DROP:
			case KW_ENABLE:
			case KW_GRANT:
			case KW_KILL:
			case KW_LOCK:
			case KW_MSCK:
			case KW_RELOAD:
			case KW_REPLACE:
			case KW_REVOKE:
			case KW_SHOW:
			case KW_TRUNCATE:
			case KW_UNLOCK:
			case KW_USE:
				{
				alt8=8;
				}
				break;
			case KW_SET:
				{
				int LA8_27 = input.LA(2);
				if ( (LA8_27==KW_ROLE) ) {
					alt8=8;
				}
				else if ( (LA8_27==KW_AUTOCOMMIT) ) {
					alt8=11;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 8, 27, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_DELETE:
				{
				alt8=9;
				}
				break;
			case KW_UPDATE:
				{
				alt8=10;
				}
				break;
			case KW_COMMIT:
			case KW_ROLLBACK:
			case KW_START:
				{
				alt8=11;
				}
				break;
			case KW_MERGE:
				{
				alt8=12;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 8, 0, input);
				throw nvae;
			}
			switch (alt8) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:823:7: queryStatementExpression
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_queryStatementExpression_in_execStatement1611);
					queryStatementExpression25=queryStatementExpression();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, queryStatementExpression25.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:824:7: loadStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_loadStatement_in_execStatement1619);
					loadStatement26=loadStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, loadStatement26.getTree());

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:825:7: exportStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_exportStatement_in_execStatement1627);
					exportStatement27=exportStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, exportStatement27.getTree());

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:826:7: importStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_importStatement_in_execStatement1635);
					importStatement28=importStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, importStatement28.getTree());

					}
					break;
				case 5 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:827:7: replDumpStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_replDumpStatement_in_execStatement1643);
					replDumpStatement29=replDumpStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, replDumpStatement29.getTree());

					}
					break;
				case 6 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:828:7: replLoadStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_replLoadStatement_in_execStatement1651);
					replLoadStatement30=replLoadStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, replLoadStatement30.getTree());

					}
					break;
				case 7 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:829:7: replStatusStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_replStatusStatement_in_execStatement1659);
					replStatusStatement31=replStatusStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, replStatusStatement31.getTree());

					}
					break;
				case 8 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:830:7: ddlStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_ddlStatement_in_execStatement1667);
					ddlStatement32=ddlStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, ddlStatement32.getTree());

					}
					break;
				case 9 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:831:7: deleteStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_deleteStatement_in_execStatement1675);
					deleteStatement33=deleteStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, deleteStatement33.getTree());

					}
					break;
				case 10 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:832:7: updateStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_updateStatement_in_execStatement1683);
					updateStatement34=updateStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, updateStatement34.getTree());

					}
					break;
				case 11 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:833:7: sqlTransactionStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_sqlTransactionStatement_in_execStatement1691);
					sqlTransactionStatement35=sqlTransactionStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, sqlTransactionStatement35.getTree());

					}
					break;
				case 12 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:834:7: mergeStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_mergeStatement_in_execStatement1699);
					mergeStatement36=mergeStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, mergeStatement36.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
		 reportError(e);
		  throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "execStatement"


	public static class loadStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "loadStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:837:1: loadStatement : KW_LOAD KW_DATA (islocal= KW_LOCAL )? KW_INPATH (path= StringLiteral ) (isoverwrite= KW_OVERWRITE )? KW_INTO KW_TABLE (tab= tableOrPartition ) ( inputFileFormat )? -> ^( TOK_LOAD $path $tab ( $islocal)? ( $isoverwrite)? ( inputFileFormat )? ) ;
	public final HiveParser.loadStatement_return loadStatement() throws RecognitionException {
		HiveParser.loadStatement_return retval = new HiveParser.loadStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token islocal=null;
		Token path=null;
		Token isoverwrite=null;
		Token KW_LOAD37=null;
		Token KW_DATA38=null;
		Token KW_INPATH39=null;
		Token KW_INTO40=null;
		Token KW_TABLE41=null;
		ParserRuleReturnScope tab =null;
		ParserRuleReturnScope inputFileFormat42 =null;

		ASTNode islocal_tree=null;
		ASTNode path_tree=null;
		ASTNode isoverwrite_tree=null;
		ASTNode KW_LOAD37_tree=null;
		ASTNode KW_DATA38_tree=null;
		ASTNode KW_INPATH39_tree=null;
		ASTNode KW_INTO40_tree=null;
		ASTNode KW_TABLE41_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_INTO=new RewriteRuleTokenStream(adaptor,"token KW_INTO");
		RewriteRuleTokenStream stream_KW_INPATH=new RewriteRuleTokenStream(adaptor,"token KW_INPATH");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleTokenStream stream_KW_OVERWRITE=new RewriteRuleTokenStream(adaptor,"token KW_OVERWRITE");
		RewriteRuleTokenStream stream_KW_LOAD=new RewriteRuleTokenStream(adaptor,"token KW_LOAD");
		RewriteRuleTokenStream stream_KW_DATA=new RewriteRuleTokenStream(adaptor,"token KW_DATA");
		RewriteRuleTokenStream stream_KW_LOCAL=new RewriteRuleTokenStream(adaptor,"token KW_LOCAL");
		RewriteRuleSubtreeStream stream_inputFileFormat=new RewriteRuleSubtreeStream(adaptor,"rule inputFileFormat");
		RewriteRuleSubtreeStream stream_tableOrPartition=new RewriteRuleSubtreeStream(adaptor,"rule tableOrPartition");

		 pushMsg("load statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:840:5: ( KW_LOAD KW_DATA (islocal= KW_LOCAL )? KW_INPATH (path= StringLiteral ) (isoverwrite= KW_OVERWRITE )? KW_INTO KW_TABLE (tab= tableOrPartition ) ( inputFileFormat )? -> ^( TOK_LOAD $path $tab ( $islocal)? ( $isoverwrite)? ( inputFileFormat )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:840:7: KW_LOAD KW_DATA (islocal= KW_LOCAL )? KW_INPATH (path= StringLiteral ) (isoverwrite= KW_OVERWRITE )? KW_INTO KW_TABLE (tab= tableOrPartition ) ( inputFileFormat )?
			{
			KW_LOAD37=(Token)match(input,KW_LOAD,FOLLOW_KW_LOAD_in_loadStatement1726); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_LOAD.add(KW_LOAD37);

			KW_DATA38=(Token)match(input,KW_DATA,FOLLOW_KW_DATA_in_loadStatement1728); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DATA.add(KW_DATA38);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:840:23: (islocal= KW_LOCAL )?
			int alt9=2;
			int LA9_0 = input.LA(1);
			if ( (LA9_0==KW_LOCAL) ) {
				alt9=1;
			}
			switch (alt9) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:840:24: islocal= KW_LOCAL
					{
					islocal=(Token)match(input,KW_LOCAL,FOLLOW_KW_LOCAL_in_loadStatement1733); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_LOCAL.add(islocal);

					}
					break;

			}

			KW_INPATH39=(Token)match(input,KW_INPATH,FOLLOW_KW_INPATH_in_loadStatement1737); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_INPATH.add(KW_INPATH39);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:840:53: (path= StringLiteral )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:840:54: path= StringLiteral
			{
			path=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_loadStatement1742); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(path);

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:840:74: (isoverwrite= KW_OVERWRITE )?
			int alt10=2;
			int LA10_0 = input.LA(1);
			if ( (LA10_0==KW_OVERWRITE) ) {
				alt10=1;
			}
			switch (alt10) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:840:75: isoverwrite= KW_OVERWRITE
					{
					isoverwrite=(Token)match(input,KW_OVERWRITE,FOLLOW_KW_OVERWRITE_in_loadStatement1748); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_OVERWRITE.add(isoverwrite);

					}
					break;

			}

			KW_INTO40=(Token)match(input,KW_INTO,FOLLOW_KW_INTO_in_loadStatement1752); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_INTO.add(KW_INTO40);

			KW_TABLE41=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_loadStatement1754); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE41);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:840:119: (tab= tableOrPartition )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:840:120: tab= tableOrPartition
			{
			pushFollow(FOLLOW_tableOrPartition_in_loadStatement1759);
			tab=tableOrPartition();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableOrPartition.add(tab.getTree());
			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:840:142: ( inputFileFormat )?
			int alt11=2;
			int LA11_0 = input.LA(1);
			if ( (LA11_0==KW_INPUTFORMAT) ) {
				alt11=1;
			}
			switch (alt11) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:840:142: inputFileFormat
					{
					pushFollow(FOLLOW_inputFileFormat_in_loadStatement1762);
					inputFileFormat42=inputFileFormat();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_inputFileFormat.add(inputFileFormat42.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: inputFileFormat, islocal, path, isoverwrite, tab
			// token labels: islocal, path, isoverwrite
			// rule labels: tab, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_islocal=new RewriteRuleTokenStream(adaptor,"token islocal",islocal);
			RewriteRuleTokenStream stream_path=new RewriteRuleTokenStream(adaptor,"token path",path);
			RewriteRuleTokenStream stream_isoverwrite=new RewriteRuleTokenStream(adaptor,"token isoverwrite",isoverwrite);
			RewriteRuleSubtreeStream stream_tab=new RewriteRuleSubtreeStream(adaptor,"rule tab",tab!=null?tab.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 841:5: -> ^( TOK_LOAD $path $tab ( $islocal)? ( $isoverwrite)? ( inputFileFormat )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:841:8: ^( TOK_LOAD $path $tab ( $islocal)? ( $isoverwrite)? ( inputFileFormat )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_LOAD, "TOK_LOAD"), root_1);
				adaptor.addChild(root_1, stream_path.nextNode());
				adaptor.addChild(root_1, stream_tab.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:841:31: ( $islocal)?
				if ( stream_islocal.hasNext() ) {
					adaptor.addChild(root_1, stream_islocal.nextNode());
				}
				stream_islocal.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:841:41: ( $isoverwrite)?
				if ( stream_isoverwrite.hasNext() ) {
					adaptor.addChild(root_1, stream_isoverwrite.nextNode());
				}
				stream_isoverwrite.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:841:54: ( inputFileFormat )?
				if ( stream_inputFileFormat.hasNext() ) {
					adaptor.addChild(root_1, stream_inputFileFormat.nextTree());
				}
				stream_inputFileFormat.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
		 reportError(e);
		  throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "loadStatement"


	public static class replicationClause_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "replicationClause"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:844:1: replicationClause : KW_FOR (isMetadataOnly= KW_METADATA )? KW_REPLICATION LPAREN (replId= StringLiteral ) RPAREN -> ^( TOK_REPLICATION $replId ( $isMetadataOnly)? ) ;
	public final HiveParser.replicationClause_return replicationClause() throws RecognitionException {
		HiveParser.replicationClause_return retval = new HiveParser.replicationClause_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token isMetadataOnly=null;
		Token replId=null;
		Token KW_FOR43=null;
		Token KW_REPLICATION44=null;
		Token LPAREN45=null;
		Token RPAREN46=null;

		ASTNode isMetadataOnly_tree=null;
		ASTNode replId_tree=null;
		ASTNode KW_FOR43_tree=null;
		ASTNode KW_REPLICATION44_tree=null;
		ASTNode LPAREN45_tree=null;
		ASTNode RPAREN46_tree=null;
		RewriteRuleTokenStream stream_KW_REPLICATION=new RewriteRuleTokenStream(adaptor,"token KW_REPLICATION");
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_FOR=new RewriteRuleTokenStream(adaptor,"token KW_FOR");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleTokenStream stream_KW_METADATA=new RewriteRuleTokenStream(adaptor,"token KW_METADATA");

		 pushMsg("replication clause", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:847:5: ( KW_FOR (isMetadataOnly= KW_METADATA )? KW_REPLICATION LPAREN (replId= StringLiteral ) RPAREN -> ^( TOK_REPLICATION $replId ( $isMetadataOnly)? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:847:7: KW_FOR (isMetadataOnly= KW_METADATA )? KW_REPLICATION LPAREN (replId= StringLiteral ) RPAREN
			{
			KW_FOR43=(Token)match(input,KW_FOR,FOLLOW_KW_FOR_in_replicationClause1817); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FOR.add(KW_FOR43);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:847:14: (isMetadataOnly= KW_METADATA )?
			int alt12=2;
			int LA12_0 = input.LA(1);
			if ( (LA12_0==KW_METADATA) ) {
				alt12=1;
			}
			switch (alt12) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:847:15: isMetadataOnly= KW_METADATA
					{
					isMetadataOnly=(Token)match(input,KW_METADATA,FOLLOW_KW_METADATA_in_replicationClause1822); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_METADATA.add(isMetadataOnly);

					}
					break;

			}

			KW_REPLICATION44=(Token)match(input,KW_REPLICATION,FOLLOW_KW_REPLICATION_in_replicationClause1826); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_REPLICATION.add(KW_REPLICATION44);

			LPAREN45=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_replicationClause1828); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN45);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:847:66: (replId= StringLiteral )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:847:67: replId= StringLiteral
			{
			replId=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_replicationClause1833); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(replId);

			}

			RPAREN46=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_replicationClause1836); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN46);

			// AST REWRITE
			// elements: isMetadataOnly, replId
			// token labels: replId, isMetadataOnly
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_replId=new RewriteRuleTokenStream(adaptor,"token replId",replId);
			RewriteRuleTokenStream stream_isMetadataOnly=new RewriteRuleTokenStream(adaptor,"token isMetadataOnly",isMetadataOnly);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 848:5: -> ^( TOK_REPLICATION $replId ( $isMetadataOnly)? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:848:8: ^( TOK_REPLICATION $replId ( $isMetadataOnly)? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_REPLICATION, "TOK_REPLICATION"), root_1);
				adaptor.addChild(root_1, stream_replId.nextNode());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:848:35: ( $isMetadataOnly)?
				if ( stream_isMetadataOnly.hasNext() ) {
					adaptor.addChild(root_1, stream_isMetadataOnly.nextNode());
				}
				stream_isMetadataOnly.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup hook: runs before leaving the rule on both success and failure
		}
		return retval;
	}
	// $ANTLR end "replicationClause"


	public static class exportStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "exportStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:851:1: exportStatement : KW_EXPORT KW_TABLE (tab= tableOrPartition ) KW_TO (path= StringLiteral ) ( replicationClause )? -> ^( TOK_EXPORT $tab $path ( replicationClause )? ) ;
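	// Illustrative input (a sketch; the table name, partition spec and path are
	// placeholders, and tableOrPartition is assumed to accept an optional
	// PARTITION spec as in the rest of this grammar):
	//
	//   EXPORT TABLE sales PARTITION (ds='2018-05-01')
	//     TO '/user/hive/export/sales'
	//     FOR REPLICATION ('replication-event-1001')
	//
	// parsed into ^(TOK_EXPORT $tab $path replicationClause?).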
	public final HiveParser.exportStatement_return exportStatement() throws RecognitionException {
		HiveParser.exportStatement_return retval = new HiveParser.exportStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token path=null;
		Token KW_EXPORT47=null;
		Token KW_TABLE48=null;
		Token KW_TO49=null;
		ParserRuleReturnScope tab =null;
		ParserRuleReturnScope replicationClause50 =null;

		ASTNode path_tree=null;
		ASTNode KW_EXPORT47_tree=null;
		ASTNode KW_TABLE48_tree=null;
		ASTNode KW_TO49_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_TO=new RewriteRuleTokenStream(adaptor,"token KW_TO");
		RewriteRuleTokenStream stream_KW_EXPORT=new RewriteRuleTokenStream(adaptor,"token KW_EXPORT");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleSubtreeStream stream_tableOrPartition=new RewriteRuleSubtreeStream(adaptor,"rule tableOrPartition");
		RewriteRuleSubtreeStream stream_replicationClause=new RewriteRuleSubtreeStream(adaptor,"rule replicationClause");

		 pushMsg("export statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:854:5: ( KW_EXPORT KW_TABLE (tab= tableOrPartition ) KW_TO (path= StringLiteral ) ( replicationClause )? -> ^( TOK_EXPORT $tab $path ( replicationClause )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:854:7: KW_EXPORT KW_TABLE (tab= tableOrPartition ) KW_TO (path= StringLiteral ) ( replicationClause )?
			{
			KW_EXPORT47=(Token)match(input,KW_EXPORT,FOLLOW_KW_EXPORT_in_exportStatement1880); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_EXPORT.add(KW_EXPORT47);

			KW_TABLE48=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_exportStatement1888); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE48);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:855:16: (tab= tableOrPartition )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:855:17: tab= tableOrPartition
			{
			pushFollow(FOLLOW_tableOrPartition_in_exportStatement1893);
			tab=tableOrPartition();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableOrPartition.add(tab.getTree());
			}

			KW_TO49=(Token)match(input,KW_TO,FOLLOW_KW_TO_in_exportStatement1902); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TO.add(KW_TO49);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:856:13: (path= StringLiteral )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:856:14: path= StringLiteral
			{
			path=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_exportStatement1907); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(path);

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:857:7: ( replicationClause )?
			int alt13=2;
			int LA13_0 = input.LA(1);
			if ( (LA13_0==KW_FOR) ) {
				alt13=1;
			}
			switch (alt13) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:857:7: replicationClause
					{
					pushFollow(FOLLOW_replicationClause_in_exportStatement1916);
					replicationClause50=replicationClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_replicationClause.add(replicationClause50.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: replicationClause, tab, path
			// token labels: path
			// rule labels: tab, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_path=new RewriteRuleTokenStream(adaptor,"token path",path);
			RewriteRuleSubtreeStream stream_tab=new RewriteRuleSubtreeStream(adaptor,"rule tab",tab!=null?tab.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 858:5: -> ^( TOK_EXPORT $tab $path ( replicationClause )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:858:8: ^( TOK_EXPORT $tab $path ( replicationClause )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_EXPORT, "TOK_EXPORT"), root_1);
				adaptor.addChild(root_1, stream_tab.nextTree());
				adaptor.addChild(root_1, stream_path.nextNode());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:858:32: ( replicationClause )?
				if ( stream_replicationClause.hasNext() ) {
					adaptor.addChild(root_1, stream_replicationClause.nextTree());
				}
				stream_replicationClause.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup hook: runs before leaving the rule on both success and failure
		}
		return retval;
	}
	// $ANTLR end "exportStatement"


	public static class importStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "importStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:861:1: importStatement : KW_IMPORT ( (ext= KW_EXTERNAL )? KW_TABLE (tab= tableOrPartition ) )? KW_FROM (path= StringLiteral ) ( tableLocation )? -> ^( TOK_IMPORT $path ( $tab)? ( $ext)? ( tableLocation )? ) ;
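	// Illustrative input (a sketch; table name and paths are placeholders):
	//
	//   IMPORT EXTERNAL TABLE new_sales
	//     FROM '/user/hive/export/sales'
	//     LOCATION '/warehouse/new_sales'
	//
	// Note that the rewrite reorders the children into
	// ^(TOK_IMPORT $path $tab? $ext? tableLocation?): the source path always
	// comes first in the AST regardless of its position in the input.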
	public final HiveParser.importStatement_return importStatement() throws RecognitionException {
		HiveParser.importStatement_return retval = new HiveParser.importStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token ext=null;
		Token path=null;
		Token KW_IMPORT51=null;
		Token KW_TABLE52=null;
		Token KW_FROM53=null;
		ParserRuleReturnScope tab =null;
		ParserRuleReturnScope tableLocation54 =null;

		ASTNode ext_tree=null;
		ASTNode path_tree=null;
		ASTNode KW_IMPORT51_tree=null;
		ASTNode KW_TABLE52_tree=null;
		ASTNode KW_FROM53_tree=null;
		RewriteRuleTokenStream stream_KW_EXTERNAL=new RewriteRuleTokenStream(adaptor,"token KW_EXTERNAL");
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_FROM=new RewriteRuleTokenStream(adaptor,"token KW_FROM");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleTokenStream stream_KW_IMPORT=new RewriteRuleTokenStream(adaptor,"token KW_IMPORT");
		RewriteRuleSubtreeStream stream_tableLocation=new RewriteRuleSubtreeStream(adaptor,"rule tableLocation");
		RewriteRuleSubtreeStream stream_tableOrPartition=new RewriteRuleSubtreeStream(adaptor,"rule tableOrPartition");

		 pushMsg("import statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:864:8: ( KW_IMPORT ( (ext= KW_EXTERNAL )? KW_TABLE (tab= tableOrPartition ) )? KW_FROM (path= StringLiteral ) ( tableLocation )? -> ^( TOK_IMPORT $path ( $tab)? ( $ext)? ( tableLocation )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:864:10: KW_IMPORT ( (ext= KW_EXTERNAL )? KW_TABLE (tab= tableOrPartition ) )? KW_FROM (path= StringLiteral ) ( tableLocation )?
			{
			KW_IMPORT51=(Token)match(input,KW_IMPORT,FOLLOW_KW_IMPORT_in_importStatement1966); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_IMPORT.add(KW_IMPORT51);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:865:10: ( (ext= KW_EXTERNAL )? KW_TABLE (tab= tableOrPartition ) )?
			int alt15=2;
			int LA15_0 = input.LA(1);
			if ( (LA15_0==KW_EXTERNAL||LA15_0==KW_TABLE) ) {
				alt15=1;
			}
			switch (alt15) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:865:11: (ext= KW_EXTERNAL )? KW_TABLE (tab= tableOrPartition )
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:865:11: (ext= KW_EXTERNAL )?
					int alt14=2;
					int LA14_0 = input.LA(1);
					if ( (LA14_0==KW_EXTERNAL) ) {
						alt14=1;
					}
					switch (alt14) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:865:12: ext= KW_EXTERNAL
							{
							ext=(Token)match(input,KW_EXTERNAL,FOLLOW_KW_EXTERNAL_in_importStatement1981); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_EXTERNAL.add(ext);

							}
							break;

					}

					KW_TABLE52=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_importStatement1985); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE52);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:865:39: (tab= tableOrPartition )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:865:40: tab= tableOrPartition
					{
					pushFollow(FOLLOW_tableOrPartition_in_importStatement1990);
					tab=tableOrPartition();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableOrPartition.add(tab.getTree());
					}

					}
					break;

			}

			KW_FROM53=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_importStatement2004); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FROM.add(KW_FROM53);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:866:18: (path= StringLiteral )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:866:19: path= StringLiteral
			{
			path=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_importStatement2009); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(path);

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:867:10: ( tableLocation )?
			int alt16=2;
			int LA16_0 = input.LA(1);
			if ( (LA16_0==KW_LOCATION) ) {
				alt16=1;
			}
			switch (alt16) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:867:10: tableLocation
					{
					pushFollow(FOLLOW_tableLocation_in_importStatement2021);
					tableLocation54=tableLocation();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableLocation.add(tableLocation54.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: tab, path, ext, tableLocation
			// token labels: ext, path
			// rule labels: tab, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_ext=new RewriteRuleTokenStream(adaptor,"token ext",ext);
			RewriteRuleTokenStream stream_path=new RewriteRuleTokenStream(adaptor,"token path",path);
			RewriteRuleSubtreeStream stream_tab=new RewriteRuleSubtreeStream(adaptor,"rule tab",tab!=null?tab.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 868:5: -> ^( TOK_IMPORT $path ( $tab)? ( $ext)? ( tableLocation )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:868:8: ^( TOK_IMPORT $path ( $tab)? ( $ext)? ( tableLocation )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_IMPORT, "TOK_IMPORT"), root_1);
				adaptor.addChild(root_1, stream_path.nextNode());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:868:28: ( $tab)?
				if ( stream_tab.hasNext() ) {
					adaptor.addChild(root_1, stream_tab.nextTree());
				}
				stream_tab.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:868:34: ( $ext)?
				if ( stream_ext.hasNext() ) {
					adaptor.addChild(root_1, stream_ext.nextNode());
				}
				stream_ext.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:868:39: ( tableLocation )?
				if ( stream_tableLocation.hasNext() ) {
					adaptor.addChild(root_1, stream_tableLocation.nextTree());
				}
				stream_tableLocation.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup hook: runs before leaving the rule on both success and failure
		}
		return retval;
	}
	// $ANTLR end "importStatement"


	public static class replDumpStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "replDumpStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:871:1: replDumpStatement : KW_REPL KW_DUMP (dbName= identifier ) ( DOT tblName= identifier )? ( KW_FROM (eventId= Number ) ( KW_TO (rangeEnd= Number ) )? ( KW_LIMIT (batchSize= Number ) )? )? ( KW_WITH replConf= replConfigs )? -> ^( TOK_REPL_DUMP $dbName ( ^( TOK_TABNAME $tblName) )? ( ^( TOK_FROM $eventId ( TOK_TO $rangeEnd)? ( TOK_LIMIT $batchSize)? ) )? ( $replConf)? ) ;
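	// Illustrative input (a sketch; db/table names, event ids and config
	// keys/values are placeholders):
	//
	//   REPL DUMP sales_db.sales FROM 100 TO 200 LIMIT 50
	//     WITH ('some.conf.key'='value')
	//
	// The FROM/TO/LIMIT triple selects an event-id window for incremental dumps;
	// the rewrite folds it into a single
	// ^(TOK_FROM $eventId (TOK_TO $rangeEnd)? (TOK_LIMIT $batchSize)?) child.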
	public final HiveParser.replDumpStatement_return replDumpStatement() throws RecognitionException {
		HiveParser.replDumpStatement_return retval = new HiveParser.replDumpStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token eventId=null;
		Token rangeEnd=null;
		Token batchSize=null;
		Token KW_REPL55=null;
		Token KW_DUMP56=null;
		Token DOT57=null;
		Token KW_FROM58=null;
		Token KW_TO59=null;
		Token KW_LIMIT60=null;
		Token KW_WITH61=null;
		ParserRuleReturnScope dbName =null;
		ParserRuleReturnScope tblName =null;
		ParserRuleReturnScope replConf =null;

		ASTNode eventId_tree=null;
		ASTNode rangeEnd_tree=null;
		ASTNode batchSize_tree=null;
		ASTNode KW_REPL55_tree=null;
		ASTNode KW_DUMP56_tree=null;
		ASTNode DOT57_tree=null;
		ASTNode KW_FROM58_tree=null;
		ASTNode KW_TO59_tree=null;
		ASTNode KW_LIMIT60_tree=null;
		ASTNode KW_WITH61_tree=null;
		RewriteRuleTokenStream stream_KW_DUMP=new RewriteRuleTokenStream(adaptor,"token KW_DUMP");
		RewriteRuleTokenStream stream_Number=new RewriteRuleTokenStream(adaptor,"token Number");
		RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
		RewriteRuleTokenStream stream_KW_TO=new RewriteRuleTokenStream(adaptor,"token KW_TO");
		RewriteRuleTokenStream stream_KW_REPL=new RewriteRuleTokenStream(adaptor,"token KW_REPL");
		RewriteRuleTokenStream stream_DOT=new RewriteRuleTokenStream(adaptor,"token DOT");
		RewriteRuleTokenStream stream_KW_FROM=new RewriteRuleTokenStream(adaptor,"token KW_FROM");
		RewriteRuleTokenStream stream_KW_LIMIT=new RewriteRuleTokenStream(adaptor,"token KW_LIMIT");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_replConfigs=new RewriteRuleSubtreeStream(adaptor,"rule replConfigs");

		 pushMsg("replication dump statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:874:7: ( KW_REPL KW_DUMP (dbName= identifier ) ( DOT tblName= identifier )? ( KW_FROM (eventId= Number ) ( KW_TO (rangeEnd= Number ) )? ( KW_LIMIT (batchSize= Number ) )? )? ( KW_WITH replConf= replConfigs )? -> ^( TOK_REPL_DUMP $dbName ( ^( TOK_TABNAME $tblName) )? ( ^( TOK_FROM $eventId ( TOK_TO $rangeEnd)? ( TOK_LIMIT $batchSize)? ) )? ( $replConf)? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:874:9: KW_REPL KW_DUMP (dbName= identifier ) ( DOT tblName= identifier )? ( KW_FROM (eventId= Number ) ( KW_TO (rangeEnd= Number ) )? ( KW_LIMIT (batchSize= Number ) )? )? ( KW_WITH replConf= replConfigs )?
			{
			KW_REPL55=(Token)match(input,KW_REPL,FOLLOW_KW_REPL_in_replDumpStatement2075); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_REPL.add(KW_REPL55);

			KW_DUMP56=(Token)match(input,KW_DUMP,FOLLOW_KW_DUMP_in_replDumpStatement2077); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DUMP.add(KW_DUMP56);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:875:9: (dbName= identifier )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:875:10: dbName= identifier
			{
			pushFollow(FOLLOW_identifier_in_replDumpStatement2090);
			dbName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(dbName.getTree());
			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:875:29: ( DOT tblName= identifier )?
			int alt17=2;
			int LA17_0 = input.LA(1);
			if ( (LA17_0==DOT) ) {
				alt17=1;
			}
			switch (alt17) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:875:30: DOT tblName= identifier
					{
					DOT57=(Token)match(input,DOT,FOLLOW_DOT_in_replDumpStatement2094); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_DOT.add(DOT57);

					pushFollow(FOLLOW_identifier_in_replDumpStatement2098);
					tblName=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(tblName.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:876:9: ( KW_FROM (eventId= Number ) ( KW_TO (rangeEnd= Number ) )? ( KW_LIMIT (batchSize= Number ) )? )?
			int alt20=2;
			int LA20_0 = input.LA(1);
			if ( (LA20_0==KW_FROM) ) {
				alt20=1;
			}
			switch (alt20) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:876:10: KW_FROM (eventId= Number ) ( KW_TO (rangeEnd= Number ) )? ( KW_LIMIT (batchSize= Number ) )?
					{
					KW_FROM58=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_replDumpStatement2111); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_FROM.add(KW_FROM58);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:876:18: (eventId= Number )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:876:19: eventId= Number
					{
					eventId=(Token)match(input,Number,FOLLOW_Number_in_replDumpStatement2116); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_Number.add(eventId);

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:877:11: ( KW_TO (rangeEnd= Number ) )?
					int alt18=2;
					int LA18_0 = input.LA(1);
					if ( (LA18_0==KW_TO) ) {
						alt18=1;
					}
					switch (alt18) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:877:12: KW_TO (rangeEnd= Number )
							{
							KW_TO59=(Token)match(input,KW_TO,FOLLOW_KW_TO_in_replDumpStatement2130); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_TO.add(KW_TO59);

							// org/apache/hadoop/hive/ql/parse/HiveParser.g:877:18: (rangeEnd= Number )
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:877:19: rangeEnd= Number
							{
							rangeEnd=(Token)match(input,Number,FOLLOW_Number_in_replDumpStatement2135); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_Number.add(rangeEnd);

							}

							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:878:11: ( KW_LIMIT (batchSize= Number ) )?
					int alt19=2;
					int LA19_0 = input.LA(1);
					if ( (LA19_0==KW_LIMIT) ) {
						alt19=1;
					}
					switch (alt19) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:878:12: KW_LIMIT (batchSize= Number )
							{
							KW_LIMIT60=(Token)match(input,KW_LIMIT,FOLLOW_KW_LIMIT_in_replDumpStatement2151); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_LIMIT.add(KW_LIMIT60);

							// org/apache/hadoop/hive/ql/parse/HiveParser.g:878:21: (batchSize= Number )
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:878:22: batchSize= Number
							{
							batchSize=(Token)match(input,Number,FOLLOW_Number_in_replDumpStatement2156); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_Number.add(batchSize);

							}

							}
							break;

					}

					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:880:9: ( KW_WITH replConf= replConfigs )?
			int alt21=2;
			int LA21_0 = input.LA(1);
			if ( (LA21_0==KW_WITH) ) {
				alt21=1;
			}
			switch (alt21) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:880:10: KW_WITH replConf= replConfigs
					{
					KW_WITH61=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_replDumpStatement2181); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_WITH.add(KW_WITH61);

					pushFollow(FOLLOW_replConfigs_in_replDumpStatement2185);
					replConf=replConfigs();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_replConfigs.add(replConf.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: eventId, replConf, rangeEnd, batchSize, dbName, tblName
			// token labels: eventId, batchSize, rangeEnd
			// rule labels: dbName, tblName, retval, replConf
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_eventId=new RewriteRuleTokenStream(adaptor,"token eventId",eventId);
			RewriteRuleTokenStream stream_batchSize=new RewriteRuleTokenStream(adaptor,"token batchSize",batchSize);
			RewriteRuleTokenStream stream_rangeEnd=new RewriteRuleTokenStream(adaptor,"token rangeEnd",rangeEnd);
			RewriteRuleSubtreeStream stream_dbName=new RewriteRuleSubtreeStream(adaptor,"rule dbName",dbName!=null?dbName.getTree():null);
			RewriteRuleSubtreeStream stream_tblName=new RewriteRuleSubtreeStream(adaptor,"rule tblName",tblName!=null?tblName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
			RewriteRuleSubtreeStream stream_replConf=new RewriteRuleSubtreeStream(adaptor,"rule replConf",replConf!=null?replConf.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 881:5: -> ^( TOK_REPL_DUMP $dbName ( ^( TOK_TABNAME $tblName) )? ( ^( TOK_FROM $eventId ( TOK_TO $rangeEnd)? ( TOK_LIMIT $batchSize)? ) )? ( $replConf)? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:881:8: ^( TOK_REPL_DUMP $dbName ( ^( TOK_TABNAME $tblName) )? ( ^( TOK_FROM $eventId ( TOK_TO $rangeEnd)? ( TOK_LIMIT $batchSize)? ) )? ( $replConf)? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_REPL_DUMP, "TOK_REPL_DUMP"), root_1);
				adaptor.addChild(root_1, stream_dbName.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:881:32: ( ^( TOK_TABNAME $tblName) )?
				if ( stream_tblName.hasNext() ) {
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:881:32: ^( TOK_TABNAME $tblName)
					{
					ASTNode root_2 = (ASTNode)adaptor.nil();
					root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABNAME, "TOK_TABNAME"), root_2);
					adaptor.addChild(root_2, stream_tblName.nextTree());
					adaptor.addChild(root_1, root_2);
					}

				}
				stream_tblName.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:881:57: ( ^( TOK_FROM $eventId ( TOK_TO $rangeEnd)? ( TOK_LIMIT $batchSize)? ) )?
				if ( stream_eventId.hasNext()||stream_rangeEnd.hasNext()||stream_batchSize.hasNext() ) {
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:881:57: ^( TOK_FROM $eventId ( TOK_TO $rangeEnd)? ( TOK_LIMIT $batchSize)? )
					{
					ASTNode root_2 = (ASTNode)adaptor.nil();
					root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FROM, "TOK_FROM"), root_2);
					adaptor.addChild(root_2, stream_eventId.nextNode());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:881:77: ( TOK_TO $rangeEnd)?
					if ( stream_rangeEnd.hasNext() ) {
						adaptor.addChild(root_2, (ASTNode)adaptor.create(TOK_TO, "TOK_TO"));
						adaptor.addChild(root_2, stream_rangeEnd.nextNode());
					}
					stream_rangeEnd.reset();

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:881:97: ( TOK_LIMIT $batchSize)?
					if ( stream_batchSize.hasNext() ) {
						adaptor.addChild(root_2, (ASTNode)adaptor.create(TOK_LIMIT, "TOK_LIMIT"));
						adaptor.addChild(root_2, stream_batchSize.nextNode());
					}
					stream_batchSize.reset();

					adaptor.addChild(root_1, root_2);
					}

				}
				stream_eventId.reset();
				stream_rangeEnd.reset();
				stream_batchSize.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:881:124: ( $replConf)?
				if ( stream_replConf.hasNext() ) {
					adaptor.addChild(root_1, stream_replConf.nextTree());
				}
				stream_replConf.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup hook: runs before leaving the rule on both success and failure
		}
		return retval;
	}
	// $ANTLR end "replDumpStatement"


	public static class replLoadStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "replLoadStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:884:1: replLoadStatement : KW_REPL KW_LOAD ( (dbName= identifier ) ( DOT tblName= identifier )? )? KW_FROM (path= StringLiteral ) ( KW_WITH replConf= replConfigs )? -> ^( TOK_REPL_LOAD $path ( ^( TOK_DBNAME $dbName) )? ( ^( TOK_TABNAME $tblName) )? ( $replConf)? ) ;
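	// Illustrative input (a sketch; names, path and config keys/values are
	// placeholders):
	//
	//   REPL LOAD sales_db_copy FROM '/user/hive/repl/dump-1001'
	//     WITH ('some.conf.key'='value')
	//
	// The db/table prefix is optional; when present, the rewrite wraps it in
	// ^(TOK_DBNAME ...) / ^(TOK_TABNAME ...) children under TOK_REPL_LOAD.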
	public final HiveParser.replLoadStatement_return replLoadStatement() throws RecognitionException {
		HiveParser.replLoadStatement_return retval = new HiveParser.replLoadStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token path=null;
		Token KW_REPL62=null;
		Token KW_LOAD63=null;
		Token DOT64=null;
		Token KW_FROM65=null;
		Token KW_WITH66=null;
		ParserRuleReturnScope dbName =null;
		ParserRuleReturnScope tblName =null;
		ParserRuleReturnScope replConf =null;

		ASTNode path_tree=null;
		ASTNode KW_REPL62_tree=null;
		ASTNode KW_LOAD63_tree=null;
		ASTNode DOT64_tree=null;
		ASTNode KW_FROM65_tree=null;
		ASTNode KW_WITH66_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
		RewriteRuleTokenStream stream_KW_REPL=new RewriteRuleTokenStream(adaptor,"token KW_REPL");
		RewriteRuleTokenStream stream_DOT=new RewriteRuleTokenStream(adaptor,"token DOT");
		RewriteRuleTokenStream stream_KW_FROM=new RewriteRuleTokenStream(adaptor,"token KW_FROM");
		RewriteRuleTokenStream stream_KW_LOAD=new RewriteRuleTokenStream(adaptor,"token KW_LOAD");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_replConfigs=new RewriteRuleSubtreeStream(adaptor,"rule replConfigs");

		 pushMsg("replication load statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:887:7: ( KW_REPL KW_LOAD ( (dbName= identifier ) ( DOT tblName= identifier )? )? KW_FROM (path= StringLiteral ) ( KW_WITH replConf= replConfigs )? -> ^( TOK_REPL_LOAD $path ( ^( TOK_DBNAME $dbName) )? ( ^( TOK_TABNAME $tblName) )? ( $replConf)? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:887:9: KW_REPL KW_LOAD ( (dbName= identifier ) ( DOT tblName= identifier )? )? KW_FROM (path= StringLiteral ) ( KW_WITH replConf= replConfigs )?
			{
			KW_REPL62=(Token)match(input,KW_REPL,FOLLOW_KW_REPL_in_replLoadStatement2265); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_REPL.add(KW_REPL62);

			KW_LOAD63=(Token)match(input,KW_LOAD,FOLLOW_KW_LOAD_in_replLoadStatement2267); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_LOAD.add(KW_LOAD63);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:888:9: ( (dbName= identifier ) ( DOT tblName= identifier )? )?
			int alt23=2;
			int LA23_0 = input.LA(1);
			if ( (LA23_0==Identifier||(LA23_0 >= KW_ABORT && LA23_0 <= KW_AFTER)||LA23_0==KW_ALLOC_FRACTION||LA23_0==KW_ANALYZE||LA23_0==KW_ARCHIVE||LA23_0==KW_ASC||(LA23_0 >= KW_AUTOCOMMIT && LA23_0 <= KW_BEFORE)||(LA23_0 >= KW_BUCKET && LA23_0 <= KW_BUCKETS)||(LA23_0 >= KW_CACHE && LA23_0 <= KW_CASCADE)||LA23_0==KW_CHANGE||(LA23_0 >= KW_CHECK && LA23_0 <= KW_COLLECTION)||(LA23_0 >= KW_COLUMNS && LA23_0 <= KW_COMMENT)||(LA23_0 >= KW_COMPACT && LA23_0 <= KW_CONCATENATE)||LA23_0==KW_CONTINUE||LA23_0==KW_DATA||LA23_0==KW_DATABASES||(LA23_0 >= KW_DATETIME && LA23_0 <= KW_DBPROPERTIES)||(LA23_0 >= KW_DEFAULT && LA23_0 <= KW_DEFINED)||(LA23_0 >= KW_DELIMITED && LA23_0 <= KW_DESC)||(LA23_0 >= KW_DETAIL && LA23_0 <= KW_DISABLE)||(LA23_0 >= KW_DISTRIBUTE && LA23_0 <= KW_DO)||LA23_0==KW_DOW||(LA23_0 >= KW_DUMP && LA23_0 <= KW_ELEM_TYPE)||LA23_0==KW_ENABLE||(LA23_0 >= KW_ENFORCED && LA23_0 <= KW_ESCAPED)||LA23_0==KW_EXCLUSIVE||(LA23_0 >= KW_EXPLAIN && LA23_0 <= KW_EXPRESSION)||(LA23_0 >= KW_FIELDS && LA23_0 <= KW_FIRST)||(LA23_0 >= KW_FORMAT && LA23_0 <= KW_FORMATTED)||LA23_0==KW_FUNCTIONS||(LA23_0 >= KW_HOUR && LA23_0 <= KW_IDXPROPERTIES)||(LA23_0 >= KW_INDEX && LA23_0 <= KW_INDEXES)||(LA23_0 >= KW_INPATH && LA23_0 <= KW_INPUTFORMAT)||(LA23_0 >= KW_ISOLATION && LA23_0 <= KW_JAR)||(LA23_0 >= KW_KEY && LA23_0 <= KW_LAST)||LA23_0==KW_LEVEL||(LA23_0 >= KW_LIMIT && LA23_0 <= KW_LOAD)||(LA23_0 >= KW_LOCATION && LA23_0 <= KW_LONG)||LA23_0==KW_MANAGEMENT||(LA23_0 >= KW_MAPJOIN && LA23_0 <= KW_MATERIALIZED)||LA23_0==KW_METADATA||(LA23_0 >= KW_MINUTE && LA23_0 <= KW_MONTH)||(LA23_0 >= KW_MOVE && LA23_0 <= KW_MSCK)||(LA23_0 >= KW_NORELY && LA23_0 <= KW_NOSCAN)||LA23_0==KW_NOVALIDATE||LA23_0==KW_NULLS||LA23_0==KW_OFFSET||(LA23_0 >= KW_OPERATOR && LA23_0 <= KW_OPTION)||(LA23_0 >= KW_OUTPUTDRIVER && LA23_0 <= KW_OUTPUTFORMAT)||(LA23_0 >= KW_OVERWRITE && LA23_0 <= KW_OWNER)||(LA23_0 >= KW_PARTITIONED && LA23_0 <= KW_PATH)||(LA23_0 >= KW_PLAN && LA23_0 <= KW_POOL)||LA23_0==KW_PRINCIPALS||(LA23_0 >= KW_PURGE && LA23_0 <= KW_QUERY_PARALLELISM)||LA23_0==KW_READ||(LA23_0 >= KW_REBUILD && LA23_0 <= KW_RECORDWRITER)||(LA23_0 >= KW_RELOAD && LA23_0 <= KW_RESTRICT)||LA23_0==KW_REWRITE||(LA23_0 >= KW_ROLE && LA23_0 <= KW_ROLES)||(LA23_0 >= KW_SCHEDULING_POLICY && LA23_0 <= KW_SECOND)||(LA23_0 >= KW_SEMI && LA23_0 <= KW_SERVER)||(LA23_0 >= KW_SETS && LA23_0 <= KW_SKEWED)||(LA23_0 >= KW_SNAPSHOT && LA23_0 <= KW_SSL)||(LA23_0 >= KW_STATISTICS && LA23_0 <= KW_SUMMARY)||LA23_0==KW_TABLES||(LA23_0 >= KW_TBLPROPERTIES && LA23_0 <= KW_TERMINATED)||LA23_0==KW_TINYINT||(LA23_0 >= KW_TOUCH && LA23_0 <= KW_TRANSACTIONS)||LA23_0==KW_UNARCHIVE||LA23_0==KW_UNDO||LA23_0==KW_UNIONTYPE||(LA23_0 >= KW_UNLOCK && LA23_0 <= KW_UNSIGNED)||(LA23_0 >= KW_URI && LA23_0 <= KW_USE)||(LA23_0 >= KW_UTC && LA23_0 <= KW_VALIDATE)||LA23_0==KW_VALUE_TYPE||(LA23_0 >= KW_VECTORIZATION && LA23_0 <= KW_WEEK)||LA23_0==KW_WHILE||(LA23_0 >= KW_WORK && LA23_0 <= KW_ZONE)||LA23_0==KW_BATCH||LA23_0==KW_DAYOFWEEK||LA23_0==KW_HOLD_DDLTIME||LA23_0==KW_IGNORE||LA23_0==KW_NO_DROP||LA23_0==KW_OFFLINE||LA23_0==KW_PROTECTION||LA23_0==KW_READONLY||LA23_0==KW_TIMESTAMPTZ) ) {
				alt23=1;
			}
			switch (alt23) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:888:10: (dbName= identifier ) ( DOT tblName= identifier )?
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:888:10: (dbName= identifier )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:888:11: dbName= identifier
					{
					pushFollow(FOLLOW_identifier_in_replLoadStatement2281);
					dbName=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(dbName.getTree());
					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:888:30: ( DOT tblName= identifier )?
					int alt22=2;
					int LA22_0 = input.LA(1);
					if ( (LA22_0==DOT) ) {
						alt22=1;
					}
					switch (alt22) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:888:31: DOT tblName= identifier
							{
							DOT64=(Token)match(input,DOT,FOLLOW_DOT_in_replLoadStatement2285); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_DOT.add(DOT64);

							pushFollow(FOLLOW_identifier_in_replLoadStatement2289);
							tblName=identifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_identifier.add(tblName.getTree());
							}
							break;

					}

					}
					break;

			}

			KW_FROM65=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_replLoadStatement2303); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FROM.add(KW_FROM65);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:889:17: (path= StringLiteral )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:889:18: path= StringLiteral
			{
			path=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_replLoadStatement2308); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(path);

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:890:9: ( KW_WITH replConf= replConfigs )?
			int alt24=2;
			int LA24_0 = input.LA(1);
			if ( (LA24_0==KW_WITH) ) {
				alt24=1;
			}
			switch (alt24) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:890:10: KW_WITH replConf= replConfigs
					{
					KW_WITH66=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_replLoadStatement2320); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_WITH.add(KW_WITH66);

					pushFollow(FOLLOW_replConfigs_in_replLoadStatement2324);
					replConf=replConfigs();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_replConfigs.add(replConf.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: dbName, path, replConf, tblName
			// token labels: path
			// rule labels: dbName, tblName, retval, replConf
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_path=new RewriteRuleTokenStream(adaptor,"token path",path);
			RewriteRuleSubtreeStream stream_dbName=new RewriteRuleSubtreeStream(adaptor,"rule dbName",dbName!=null?dbName.getTree():null);
			RewriteRuleSubtreeStream stream_tblName=new RewriteRuleSubtreeStream(adaptor,"rule tblName",tblName!=null?tblName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
			RewriteRuleSubtreeStream stream_replConf=new RewriteRuleSubtreeStream(adaptor,"rule replConf",replConf!=null?replConf.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 891:7: -> ^( TOK_REPL_LOAD $path ( ^( TOK_DBNAME $dbName) )? ( ^( TOK_TABNAME $tblName) )? ( $replConf)? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:891:10: ^( TOK_REPL_LOAD $path ( ^( TOK_DBNAME $dbName) )? ( ^( TOK_TABNAME $tblName) )? ( $replConf)? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_REPL_LOAD, "TOK_REPL_LOAD"), root_1);
				adaptor.addChild(root_1, stream_path.nextNode());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:891:32: ( ^( TOK_DBNAME $dbName) )?
				if ( stream_dbName.hasNext() ) {
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:891:32: ^( TOK_DBNAME $dbName)
					{
					ASTNode root_2 = (ASTNode)adaptor.nil();
					root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DBNAME, "TOK_DBNAME"), root_2);
					adaptor.addChild(root_2, stream_dbName.nextTree());
					adaptor.addChild(root_1, root_2);
					}

				}
				stream_dbName.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:891:55: ( ^( TOK_TABNAME $tblName) )?
				if ( stream_tblName.hasNext() ) {
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:891:55: ^( TOK_TABNAME $tblName)
					{
					ASTNode root_2 = (ASTNode)adaptor.nil();
					root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABNAME, "TOK_TABNAME"), root_2);
					adaptor.addChild(root_2, stream_tblName.nextTree());
					adaptor.addChild(root_1, root_2);
					}

				}
				stream_tblName.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:891:81: ( $replConf)?
				if ( stream_replConf.hasNext() ) {
					adaptor.addChild(root_1, stream_replConf.nextTree());
				}
				stream_replConf.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup hook: runs before leaving the rule on both success and failure
		}
		return retval;
	}
	// $ANTLR end "replLoadStatement"


	public static class replConfigs_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "replConfigs"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:894:1: replConfigs : LPAREN replConfigsList RPAREN -> ^( TOK_REPL_CONFIG replConfigsList ) ;
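	// Illustrative input (a sketch; keys and values are placeholders):
	//
	//   ('some.conf.key'='value1', 'another.conf.key'='value2')
	//
	// i.e. the parenthesized key/value list introduced by WITH in the REPL
	// statements above. The parentheses are consumed, and the list is wrapped
	// in a single ^(TOK_REPL_CONFIG replConfigsList) node.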
	public final HiveParser.replConfigs_return replConfigs() throws RecognitionException {
		HiveParser.replConfigs_return retval = new HiveParser.replConfigs_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token LPAREN67=null;
		Token RPAREN69=null;
		ParserRuleReturnScope replConfigsList68 =null;

		ASTNode LPAREN67_tree=null;
		ASTNode RPAREN69_tree=null;
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleSubtreeStream stream_replConfigsList=new RewriteRuleSubtreeStream(adaptor,"rule replConfigsList");

		 pushMsg("repl configurations", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:897:5: ( LPAREN replConfigsList RPAREN -> ^( TOK_REPL_CONFIG replConfigsList ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:898:7: LPAREN replConfigsList RPAREN
			{
			LPAREN67=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_replConfigs2396); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN67);

			pushFollow(FOLLOW_replConfigsList_in_replConfigs2398);
			replConfigsList68=replConfigsList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_replConfigsList.add(replConfigsList68.getTree());
			RPAREN69=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_replConfigs2400); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN69);

			// AST REWRITE
			// elements: replConfigsList
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 898:37: -> ^( TOK_REPL_CONFIG replConfigsList )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:898:40: ^( TOK_REPL_CONFIG replConfigsList )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_REPL_CONFIG, "TOK_REPL_CONFIG"), root_1);
				adaptor.addChild(root_1, stream_replConfigsList.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup hook: runs before leaving the rule on both success and failure
		}
		return retval;
	}
	// $ANTLR end "replConfigs"


	public static class replConfigsList_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "replConfigsList"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:901:1: replConfigsList : keyValueProperty ( COMMA keyValueProperty )* -> ^( TOK_REPL_CONFIG_LIST ( keyValueProperty )+ ) ;
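	// Illustrative input (a sketch, assuming keyValueProperty matches a quoted
	// 'key'='value' pair as it does elsewhere in this grammar; keys and values
	// below are placeholders):
	//
	//   'some.conf.key'='value1', 'another.conf.key'='value2'
	//
	// The COMMA tokens are dropped by the rewrite, leaving one or more
	// keyValueProperty subtrees under ^(TOK_REPL_CONFIG_LIST ...).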
	public final HiveParser.replConfigsList_return replConfigsList() throws RecognitionException {
		HiveParser.replConfigsList_return retval = new HiveParser.replConfigsList_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token COMMA71=null;
		ParserRuleReturnScope keyValueProperty70 =null;
		ParserRuleReturnScope keyValueProperty72 =null;

		ASTNode COMMA71_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleSubtreeStream stream_keyValueProperty=new RewriteRuleSubtreeStream(adaptor,"rule keyValueProperty");

		 pushMsg("repl configurations list", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:904:5: ( keyValueProperty ( COMMA keyValueProperty )* -> ^( TOK_REPL_CONFIG_LIST ( keyValueProperty )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:905:7: keyValueProperty ( COMMA keyValueProperty )*
			{
			pushFollow(FOLLOW_keyValueProperty_in_replConfigsList2441);
			keyValueProperty70=keyValueProperty();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_keyValueProperty.add(keyValueProperty70.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:905:24: ( COMMA keyValueProperty )*
			loop25:
			while (true) {
				int alt25=2;
				int LA25_0 = input.LA(1);
				if ( (LA25_0==COMMA) ) {
					alt25=1;
				}

				switch (alt25) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:905:25: COMMA keyValueProperty
					{
					COMMA71=(Token)match(input,COMMA,FOLLOW_COMMA_in_replConfigsList2444); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA71);

					pushFollow(FOLLOW_keyValueProperty_in_replConfigsList2446);
					keyValueProperty72=keyValueProperty();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_keyValueProperty.add(keyValueProperty72.getTree());
					}
					break;

				default :
					break loop25;
				}
			}

			// AST REWRITE
			// elements: keyValueProperty
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 905:50: -> ^( TOK_REPL_CONFIG_LIST ( keyValueProperty )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:905:53: ^( TOK_REPL_CONFIG_LIST ( keyValueProperty )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_REPL_CONFIG_LIST, "TOK_REPL_CONFIG_LIST"), root_1);
				if ( !(stream_keyValueProperty.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_keyValueProperty.hasNext() ) {
					adaptor.addChild(root_1, stream_keyValueProperty.nextTree());
				}
				stream_keyValueProperty.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup hook: runs before leaving the rule on both success and failure
		}
		return retval;
	}
	// $ANTLR end "replConfigsList"


	public static class replStatusStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "replStatusStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:908:1: replStatusStatement : KW_REPL KW_STATUS (dbName= identifier ) ( DOT tblName= identifier )? ( KW_WITH replConf= replConfigs )? -> ^( TOK_REPL_STATUS $dbName ( ^( TOK_TABNAME $tblName) )? ( $replConf)? ) ;
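	// Illustrative input (a sketch; db/table names are placeholders):
	//
	//   REPL STATUS sales_db_copy.sales
	//
	// parsed into ^(TOK_REPL_STATUS $dbName ^(TOK_TABNAME $tblName)? $replConf?).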
	public final HiveParser.replStatusStatement_return replStatusStatement() throws RecognitionException {
		HiveParser.replStatusStatement_return retval = new HiveParser.replStatusStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_REPL73=null;
		Token KW_STATUS74=null;
		Token DOT75=null;
		Token KW_WITH76=null;
		ParserRuleReturnScope dbName =null;
		ParserRuleReturnScope tblName =null;
		ParserRuleReturnScope replConf =null;

		ASTNode KW_REPL73_tree=null;
		ASTNode KW_STATUS74_tree=null;
		ASTNode DOT75_tree=null;
		ASTNode KW_WITH76_tree=null;
		RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
		RewriteRuleTokenStream stream_KW_STATUS=new RewriteRuleTokenStream(adaptor,"token KW_STATUS");
		RewriteRuleTokenStream stream_KW_REPL=new RewriteRuleTokenStream(adaptor,"token KW_REPL");
		RewriteRuleTokenStream stream_DOT=new RewriteRuleTokenStream(adaptor,"token DOT");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_replConfigs=new RewriteRuleSubtreeStream(adaptor,"rule replConfigs");

		 pushMsg("replication status statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:911:7: ( KW_REPL KW_STATUS (dbName= identifier ) ( DOT tblName= identifier )? ( KW_WITH replConf= replConfigs )? -> ^( TOK_REPL_STATUS $dbName ( ^( TOK_TABNAME $tblName) )? ( $replConf)? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:911:9: KW_REPL KW_STATUS (dbName= identifier ) ( DOT tblName= identifier )? ( KW_WITH replConf= replConfigs )?
			{
			KW_REPL73=(Token)match(input,KW_REPL,FOLLOW_KW_REPL_in_replStatusStatement2486); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_REPL.add(KW_REPL73);

			KW_STATUS74=(Token)match(input,KW_STATUS,FOLLOW_KW_STATUS_in_replStatusStatement2488); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_STATUS.add(KW_STATUS74);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:912:9: (dbName= identifier )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:912:10: dbName= identifier
			{
			pushFollow(FOLLOW_identifier_in_replStatusStatement2501);
			dbName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(dbName.getTree());
			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:912:29: ( DOT tblName= identifier )?
			int alt26=2;
			int LA26_0 = input.LA(1);
			if ( (LA26_0==DOT) ) {
				alt26=1;
			}
			switch (alt26) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:912:30: DOT tblName= identifier
					{
					DOT75=(Token)match(input,DOT,FOLLOW_DOT_in_replStatusStatement2505); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_DOT.add(DOT75);

					pushFollow(FOLLOW_identifier_in_replStatusStatement2509);
					tblName=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(tblName.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:913:9: ( KW_WITH replConf= replConfigs )?
			int alt27=2;
			int LA27_0 = input.LA(1);
			if ( (LA27_0==KW_WITH) ) {
				alt27=1;
			}
			switch (alt27) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:913:10: KW_WITH replConf= replConfigs
					{
					KW_WITH76=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_replStatusStatement2522); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_WITH.add(KW_WITH76);

					pushFollow(FOLLOW_replConfigs_in_replStatusStatement2526);
					replConf=replConfigs();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_replConfigs.add(replConf.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: dbName, tblName, replConf
			// token labels: 
			// rule labels: dbName, tblName, retval, replConf
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_dbName=new RewriteRuleSubtreeStream(adaptor,"rule dbName",dbName!=null?dbName.getTree():null);
			RewriteRuleSubtreeStream stream_tblName=new RewriteRuleSubtreeStream(adaptor,"rule tblName",tblName!=null?tblName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
			RewriteRuleSubtreeStream stream_replConf=new RewriteRuleSubtreeStream(adaptor,"rule replConf",replConf!=null?replConf.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 914:7: -> ^( TOK_REPL_STATUS $dbName ( ^( TOK_TABNAME $tblName) )? ( $replConf)? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:914:10: ^( TOK_REPL_STATUS $dbName ( ^( TOK_TABNAME $tblName) )? ( $replConf)? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_REPL_STATUS, "TOK_REPL_STATUS"), root_1);
				adaptor.addChild(root_1, stream_dbName.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:914:36: ( ^( TOK_TABNAME $tblName) )?
				if ( stream_tblName.hasNext() ) {
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:914:36: ^( TOK_TABNAME $tblName)
					{
					ASTNode root_2 = (ASTNode)adaptor.nil();
					root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABNAME, "TOK_TABNAME"), root_2);
					adaptor.addChild(root_2, stream_tblName.nextTree());
					adaptor.addChild(root_1, root_2);
					}

				}
				stream_tblName.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:914:62: ( $replConf)?
				if ( stream_replConf.hasNext() ) {
					adaptor.addChild(root_1, stream_replConf.nextTree());
				}
				stream_replConf.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup hook: runs before leaving the rule on both success and failure
		}
		return retval;
	}
	// $ANTLR end "replStatusStatement"


	public static class ddlStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "ddlStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:917:1: ddlStatement : ( createDatabaseStatement | switchDatabaseStatement | dropDatabaseStatement | createTableStatement | dropTableStatement | truncateTableStatement | alterStatement | descStatement | showStatement | metastoreCheck | createViewStatement | createMaterializedViewStatement | dropViewStatement | dropMaterializedViewStatement | createFunctionStatement | createMacroStatement | dropFunctionStatement | reloadFunctionStatement | dropMacroStatement | analyzeStatement | lockStatement | unlockStatement | lockDatabase | unlockDatabase | createRoleStatement | dropRoleStatement | ( grantPrivileges )=> grantPrivileges | ( revokePrivileges )=> revokePrivileges | showGrants | showRoleGrants | showRolePrincipals | showRoles | grantRole | revokeRole | setRole | showCurrentRole | abortTransactionStatement | killQueryStatement | resourcePlanDdlStatements );
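	// Dispatch-only rule: there is no AST rewrite here, just a 39-way alternative
	// chosen up front by dfa28.predict(input), with the selected child rule's tree
	// passed through unchanged. For example, an input beginning CREATE DATABASE
	// selects alternative 1 (createDatabaseStatement); the names and example
	// routing below the switch follow the order listed in the rule comment above.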
	public final HiveParser.ddlStatement_return ddlStatement() throws RecognitionException {
		HiveParser.ddlStatement_return retval = new HiveParser.ddlStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope createDatabaseStatement77 =null;
		ParserRuleReturnScope switchDatabaseStatement78 =null;
		ParserRuleReturnScope dropDatabaseStatement79 =null;
		ParserRuleReturnScope createTableStatement80 =null;
		ParserRuleReturnScope dropTableStatement81 =null;
		ParserRuleReturnScope truncateTableStatement82 =null;
		ParserRuleReturnScope alterStatement83 =null;
		ParserRuleReturnScope descStatement84 =null;
		ParserRuleReturnScope showStatement85 =null;
		ParserRuleReturnScope metastoreCheck86 =null;
		ParserRuleReturnScope createViewStatement87 =null;
		ParserRuleReturnScope createMaterializedViewStatement88 =null;
		ParserRuleReturnScope dropViewStatement89 =null;
		ParserRuleReturnScope dropMaterializedViewStatement90 =null;
		ParserRuleReturnScope createFunctionStatement91 =null;
		ParserRuleReturnScope createMacroStatement92 =null;
		ParserRuleReturnScope dropFunctionStatement93 =null;
		ParserRuleReturnScope reloadFunctionStatement94 =null;
		ParserRuleReturnScope dropMacroStatement95 =null;
		ParserRuleReturnScope analyzeStatement96 =null;
		ParserRuleReturnScope lockStatement97 =null;
		ParserRuleReturnScope unlockStatement98 =null;
		ParserRuleReturnScope lockDatabase99 =null;
		ParserRuleReturnScope unlockDatabase100 =null;
		ParserRuleReturnScope createRoleStatement101 =null;
		ParserRuleReturnScope dropRoleStatement102 =null;
		ParserRuleReturnScope grantPrivileges103 =null;
		ParserRuleReturnScope revokePrivileges104 =null;
		ParserRuleReturnScope showGrants105 =null;
		ParserRuleReturnScope showRoleGrants106 =null;
		ParserRuleReturnScope showRolePrincipals107 =null;
		ParserRuleReturnScope showRoles108 =null;
		ParserRuleReturnScope grantRole109 =null;
		ParserRuleReturnScope revokeRole110 =null;
		ParserRuleReturnScope setRole111 =null;
		ParserRuleReturnScope showCurrentRole112 =null;
		ParserRuleReturnScope abortTransactionStatement113 =null;
		ParserRuleReturnScope killQueryStatement114 =null;
		ParserRuleReturnScope resourcePlanDdlStatements115 =null;


		 pushMsg("ddl statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:920:5: ( createDatabaseStatement | switchDatabaseStatement | dropDatabaseStatement | createTableStatement | dropTableStatement | truncateTableStatement | alterStatement | descStatement | showStatement | metastoreCheck | createViewStatement | createMaterializedViewStatement | dropViewStatement | dropMaterializedViewStatement | createFunctionStatement | createMacroStatement | dropFunctionStatement | reloadFunctionStatement | dropMacroStatement | analyzeStatement | lockStatement | unlockStatement | lockDatabase | unlockDatabase | createRoleStatement | dropRoleStatement | ( grantPrivileges )=> grantPrivileges | ( revokePrivileges )=> revokePrivileges | showGrants | showRoleGrants | showRolePrincipals | showRoles | grantRole | revokeRole | setRole | showCurrentRole | abortTransactionStatement | killQueryStatement | resourcePlanDdlStatements )
			int alt28=39;
			alt28 = dfa28.predict(input);
			switch (alt28) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:920:7: createDatabaseStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_createDatabaseStatement_in_ddlStatement2584);
					createDatabaseStatement77=createDatabaseStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, createDatabaseStatement77.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:921:7: switchDatabaseStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_switchDatabaseStatement_in_ddlStatement2592);
					switchDatabaseStatement78=switchDatabaseStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, switchDatabaseStatement78.getTree());

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:922:7: dropDatabaseStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_dropDatabaseStatement_in_ddlStatement2600);
					dropDatabaseStatement79=dropDatabaseStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, dropDatabaseStatement79.getTree());

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:923:7: createTableStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_createTableStatement_in_ddlStatement2608);
					createTableStatement80=createTableStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, createTableStatement80.getTree());

					}
					break;
				case 5 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:924:7: dropTableStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_dropTableStatement_in_ddlStatement2616);
					dropTableStatement81=dropTableStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, dropTableStatement81.getTree());

					}
					break;
				case 6 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:925:7: truncateTableStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_truncateTableStatement_in_ddlStatement2624);
					truncateTableStatement82=truncateTableStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, truncateTableStatement82.getTree());

					}
					break;
				case 7 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:926:7: alterStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatement_in_ddlStatement2632);
					alterStatement83=alterStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatement83.getTree());

					}
					break;
				case 8 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:927:7: descStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_descStatement_in_ddlStatement2640);
					descStatement84=descStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, descStatement84.getTree());

					}
					break;
				case 9 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:928:7: showStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_showStatement_in_ddlStatement2648);
					showStatement85=showStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, showStatement85.getTree());

					}
					break;
				case 10 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:929:7: metastoreCheck
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_metastoreCheck_in_ddlStatement2656);
					metastoreCheck86=metastoreCheck();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, metastoreCheck86.getTree());

					}
					break;
				case 11 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:930:7: createViewStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_createViewStatement_in_ddlStatement2664);
					createViewStatement87=createViewStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, createViewStatement87.getTree());

					}
					break;
				case 12 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:931:7: createMaterializedViewStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_createMaterializedViewStatement_in_ddlStatement2672);
					createMaterializedViewStatement88=createMaterializedViewStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, createMaterializedViewStatement88.getTree());

					}
					break;
				case 13 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:932:7: dropViewStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_dropViewStatement_in_ddlStatement2680);
					dropViewStatement89=dropViewStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, dropViewStatement89.getTree());

					}
					break;
				case 14 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:933:7: dropMaterializedViewStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_dropMaterializedViewStatement_in_ddlStatement2688);
					dropMaterializedViewStatement90=dropMaterializedViewStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, dropMaterializedViewStatement90.getTree());

					}
					break;
				case 15 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:934:7: createFunctionStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_createFunctionStatement_in_ddlStatement2696);
					createFunctionStatement91=createFunctionStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, createFunctionStatement91.getTree());

					}
					break;
				case 16 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:935:7: createMacroStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_createMacroStatement_in_ddlStatement2704);
					createMacroStatement92=createMacroStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, createMacroStatement92.getTree());

					}
					break;
				case 17 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:936:7: dropFunctionStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_dropFunctionStatement_in_ddlStatement2712);
					dropFunctionStatement93=dropFunctionStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, dropFunctionStatement93.getTree());

					}
					break;
				case 18 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:937:7: reloadFunctionStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_reloadFunctionStatement_in_ddlStatement2720);
					reloadFunctionStatement94=reloadFunctionStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, reloadFunctionStatement94.getTree());

					}
					break;
				case 19 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:938:7: dropMacroStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_dropMacroStatement_in_ddlStatement2728);
					dropMacroStatement95=dropMacroStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, dropMacroStatement95.getTree());

					}
					break;
				case 20 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:939:7: analyzeStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_analyzeStatement_in_ddlStatement2736);
					analyzeStatement96=analyzeStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, analyzeStatement96.getTree());

					}
					break;
				case 21 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:940:7: lockStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_lockStatement_in_ddlStatement2744);
					lockStatement97=lockStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, lockStatement97.getTree());

					}
					break;
				case 22 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:941:7: unlockStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_unlockStatement_in_ddlStatement2752);
					unlockStatement98=unlockStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, unlockStatement98.getTree());

					}
					break;
				case 23 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:942:7: lockDatabase
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_lockDatabase_in_ddlStatement2760);
					lockDatabase99=lockDatabase();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, lockDatabase99.getTree());

					}
					break;
				case 24 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:943:7: unlockDatabase
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_unlockDatabase_in_ddlStatement2768);
					unlockDatabase100=unlockDatabase();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, unlockDatabase100.getTree());

					}
					break;
				case 25 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:944:7: createRoleStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_createRoleStatement_in_ddlStatement2776);
					createRoleStatement101=createRoleStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, createRoleStatement101.getTree());

					}
					break;
				case 26 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:945:7: dropRoleStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_dropRoleStatement_in_ddlStatement2784);
					dropRoleStatement102=dropRoleStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, dropRoleStatement102.getTree());

					}
					break;
				case 27 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:946:7: ( grantPrivileges )=> grantPrivileges
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_grantPrivileges_in_ddlStatement2798);
					grantPrivileges103=grantPrivileges();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, grantPrivileges103.getTree());

					}
					break;
				case 28 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:947:7: ( revokePrivileges )=> revokePrivileges
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_revokePrivileges_in_ddlStatement2812);
					revokePrivileges104=revokePrivileges();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, revokePrivileges104.getTree());

					}
					break;
				case 29 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:948:7: showGrants
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_showGrants_in_ddlStatement2820);
					showGrants105=showGrants();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, showGrants105.getTree());

					}
					break;
				case 30 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:949:7: showRoleGrants
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_showRoleGrants_in_ddlStatement2828);
					showRoleGrants106=showRoleGrants();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, showRoleGrants106.getTree());

					}
					break;
				case 31 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:950:7: showRolePrincipals
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_showRolePrincipals_in_ddlStatement2836);
					showRolePrincipals107=showRolePrincipals();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, showRolePrincipals107.getTree());

					}
					break;
				case 32 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:951:7: showRoles
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_showRoles_in_ddlStatement2844);
					showRoles108=showRoles();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, showRoles108.getTree());

					}
					break;
				case 33 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:952:7: grantRole
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_grantRole_in_ddlStatement2852);
					grantRole109=grantRole();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, grantRole109.getTree());

					}
					break;
				case 34 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:953:7: revokeRole
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_revokeRole_in_ddlStatement2860);
					revokeRole110=revokeRole();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, revokeRole110.getTree());

					}
					break;
				case 35 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:954:7: setRole
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_setRole_in_ddlStatement2868);
					setRole111=setRole();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, setRole111.getTree());

					}
					break;
				case 36 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:955:7: showCurrentRole
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_showCurrentRole_in_ddlStatement2876);
					showCurrentRole112=showCurrentRole();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, showCurrentRole112.getTree());

					}
					break;
				case 37 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:956:7: abortTransactionStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_abortTransactionStatement_in_ddlStatement2884);
					abortTransactionStatement113=abortTransactionStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, abortTransactionStatement113.getTree());

					}
					break;
				case 38 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:957:7: killQueryStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_killQueryStatement_in_ddlStatement2892);
					killQueryStatement114=killQueryStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, killQueryStatement114.getTree());

					}
					break;
				case 39 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:958:7: resourcePlanDdlStatements
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_resourcePlanDdlStatements_in_ddlStatement2900);
					resourcePlanDdlStatements115=resourcePlanDdlStatements();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, resourcePlanDdlStatements115.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// actions that must run before leaving the rule would go here (none generated)
		}
		return retval;
	}
	// $ANTLR end "ddlStatement"


	public static class ifExists_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "ifExists"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:961:1: ifExists : KW_IF KW_EXISTS -> ^( TOK_IFEXISTS ) ;
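	// Illustrative note (not part of the ANTLR output): this rule consumes the two-token
	// "IF EXISTS" clause, e.g. in "DROP TABLE IF EXISTS t", and the -> rewrite replaces
	// both tokens with a single childless TOK_IFEXISTS node.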
	public final HiveParser.ifExists_return ifExists() throws RecognitionException {
		HiveParser.ifExists_return retval = new HiveParser.ifExists_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_IF116=null;
		Token KW_EXISTS117=null;

		ASTNode KW_IF116_tree=null;
		ASTNode KW_EXISTS117_tree=null;
		RewriteRuleTokenStream stream_KW_EXISTS=new RewriteRuleTokenStream(adaptor,"token KW_EXISTS");
		RewriteRuleTokenStream stream_KW_IF=new RewriteRuleTokenStream(adaptor,"token KW_IF");

		 pushMsg("if exists clause", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:964:5: ( KW_IF KW_EXISTS -> ^( TOK_IFEXISTS ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:964:7: KW_IF KW_EXISTS
			{
			KW_IF116=(Token)match(input,KW_IF,FOLLOW_KW_IF_in_ifExists2927); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_IF.add(KW_IF116);

			KW_EXISTS117=(Token)match(input,KW_EXISTS,FOLLOW_KW_EXISTS_in_ifExists2929); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_EXISTS.add(KW_EXISTS117);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 965:5: -> ^( TOK_IFEXISTS )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:965:8: ^( TOK_IFEXISTS )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_IFEXISTS, "TOK_IFEXISTS"), root_1);
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// actions that must run before leaving the rule would go here (none generated)
		}
		return retval;
	}
	// $ANTLR end "ifExists"


	public static class restrictOrCascade_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "restrictOrCascade"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:968:1: restrictOrCascade : ( KW_RESTRICT -> ^( TOK_RESTRICT ) | KW_CASCADE -> ^( TOK_CASCADE ) );
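	// Illustrative note (not part of the ANTLR output): this rule matches the trailing
	// keyword of statements such as "DROP DATABASE d RESTRICT" or "DROP DATABASE d CASCADE",
	// emitting a TOK_RESTRICT or TOK_CASCADE node respectively.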
	public final HiveParser.restrictOrCascade_return restrictOrCascade() throws RecognitionException {
		HiveParser.restrictOrCascade_return retval = new HiveParser.restrictOrCascade_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_RESTRICT118=null;
		Token KW_CASCADE119=null;

		ASTNode KW_RESTRICT118_tree=null;
		ASTNode KW_CASCADE119_tree=null;
		RewriteRuleTokenStream stream_KW_CASCADE=new RewriteRuleTokenStream(adaptor,"token KW_CASCADE");
		RewriteRuleTokenStream stream_KW_RESTRICT=new RewriteRuleTokenStream(adaptor,"token KW_RESTRICT");

		 pushMsg("restrict or cascade clause", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:971:5: ( KW_RESTRICT -> ^( TOK_RESTRICT ) | KW_CASCADE -> ^( TOK_CASCADE ) )
			int alt29=2;
			int LA29_0 = input.LA(1);
			if ( (LA29_0==KW_RESTRICT) ) {
				alt29=1;
			}
			else if ( (LA29_0==KW_CASCADE) ) {
				alt29=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 29, 0, input);
				throw nvae;
			}

			switch (alt29) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:971:7: KW_RESTRICT
					{
					KW_RESTRICT118=(Token)match(input,KW_RESTRICT,FOLLOW_KW_RESTRICT_in_restrictOrCascade2966); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_RESTRICT.add(KW_RESTRICT118);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 972:5: -> ^( TOK_RESTRICT )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:972:8: ^( TOK_RESTRICT )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_RESTRICT, "TOK_RESTRICT"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:973:7: KW_CASCADE
					{
					KW_CASCADE119=(Token)match(input,KW_CASCADE,FOLLOW_KW_CASCADE_in_restrictOrCascade2984); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_CASCADE.add(KW_CASCADE119);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 974:5: -> ^( TOK_CASCADE )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:974:8: ^( TOK_CASCADE )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CASCADE, "TOK_CASCADE"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// actions that must run before leaving the rule would go here (none generated)
		}
		return retval;
	}
	// $ANTLR end "restrictOrCascade"


	public static class ifNotExists_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "ifNotExists"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:977:1: ifNotExists : KW_IF KW_NOT KW_EXISTS -> ^( TOK_IFNOTEXISTS ) ;
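	// Illustrative note (not part of the ANTLR output): this rule consumes "IF NOT EXISTS",
	// e.g. in "CREATE TABLE IF NOT EXISTS t (c INT)", and rewrites the three tokens to a
	// single childless TOK_IFNOTEXISTS node.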
	public final HiveParser.ifNotExists_return ifNotExists() throws RecognitionException {
		HiveParser.ifNotExists_return retval = new HiveParser.ifNotExists_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_IF120=null;
		Token KW_NOT121=null;
		Token KW_EXISTS122=null;

		ASTNode KW_IF120_tree=null;
		ASTNode KW_NOT121_tree=null;
		ASTNode KW_EXISTS122_tree=null;
		RewriteRuleTokenStream stream_KW_NOT=new RewriteRuleTokenStream(adaptor,"token KW_NOT");
		RewriteRuleTokenStream stream_KW_EXISTS=new RewriteRuleTokenStream(adaptor,"token KW_EXISTS");
		RewriteRuleTokenStream stream_KW_IF=new RewriteRuleTokenStream(adaptor,"token KW_IF");

		 pushMsg("if not exists clause", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:980:5: ( KW_IF KW_NOT KW_EXISTS -> ^( TOK_IFNOTEXISTS ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:980:7: KW_IF KW_NOT KW_EXISTS
			{
			KW_IF120=(Token)match(input,KW_IF,FOLLOW_KW_IF_in_ifNotExists3021); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_IF.add(KW_IF120);

			KW_NOT121=(Token)match(input,KW_NOT,FOLLOW_KW_NOT_in_ifNotExists3023); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_NOT.add(KW_NOT121);

			KW_EXISTS122=(Token)match(input,KW_EXISTS,FOLLOW_KW_EXISTS_in_ifNotExists3025); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_EXISTS.add(KW_EXISTS122);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 981:5: -> ^( TOK_IFNOTEXISTS )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:981:8: ^( TOK_IFNOTEXISTS )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_IFNOTEXISTS, "TOK_IFNOTEXISTS"), root_1);
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// actions that must run before leaving the rule would go here (none generated)
		}
		return retval;
	}
	// $ANTLR end "ifNotExists"


	public static class rewriteEnabled_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "rewriteEnabled"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:984:1: rewriteEnabled : KW_ENABLE KW_REWRITE -> ^( TOK_REWRITE_ENABLED ) ;
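	// Illustrative note (not part of the ANTLR output; the example statement is an
	// assumption based on Hive's materialized-view syntax): this rule consumes the
	// "ENABLE REWRITE" clause, e.g. in "ALTER MATERIALIZED VIEW mv ENABLE REWRITE",
	// producing a TOK_REWRITE_ENABLED node.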
	public final HiveParser.rewriteEnabled_return rewriteEnabled() throws RecognitionException {
		HiveParser.rewriteEnabled_return retval = new HiveParser.rewriteEnabled_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ENABLE123=null;
		Token KW_REWRITE124=null;

		ASTNode KW_ENABLE123_tree=null;
		ASTNode KW_REWRITE124_tree=null;
		RewriteRuleTokenStream stream_KW_REWRITE=new RewriteRuleTokenStream(adaptor,"token KW_REWRITE");
		RewriteRuleTokenStream stream_KW_ENABLE=new RewriteRuleTokenStream(adaptor,"token KW_ENABLE");

		 pushMsg("rewrite enabled clause", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:987:5: ( KW_ENABLE KW_REWRITE -> ^( TOK_REWRITE_ENABLED ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:987:7: KW_ENABLE KW_REWRITE
			{
			KW_ENABLE123=(Token)match(input,KW_ENABLE,FOLLOW_KW_ENABLE_in_rewriteEnabled3062); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ENABLE.add(KW_ENABLE123);

			KW_REWRITE124=(Token)match(input,KW_REWRITE,FOLLOW_KW_REWRITE_in_rewriteEnabled3064); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_REWRITE.add(KW_REWRITE124);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 988:5: -> ^( TOK_REWRITE_ENABLED )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:988:8: ^( TOK_REWRITE_ENABLED )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_REWRITE_ENABLED, "TOK_REWRITE_ENABLED"), root_1);
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// actions that must run before leaving the rule would go here (none generated)
		}
		return retval;
	}
	// $ANTLR end "rewriteEnabled"


	public static class rewriteDisabled_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "rewriteDisabled"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:991:1: rewriteDisabled : KW_DISABLE KW_REWRITE -> ^( TOK_REWRITE_DISABLED ) ;
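	// Illustrative note (not part of the ANTLR output; the example statement is an
	// assumption based on Hive's materialized-view syntax): this rule consumes the
	// "DISABLE REWRITE" clause, e.g. in "ALTER MATERIALIZED VIEW mv DISABLE REWRITE",
	// producing a TOK_REWRITE_DISABLED node.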
	public final HiveParser.rewriteDisabled_return rewriteDisabled() throws RecognitionException {
		HiveParser.rewriteDisabled_return retval = new HiveParser.rewriteDisabled_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_DISABLE125=null;
		Token KW_REWRITE126=null;

		ASTNode KW_DISABLE125_tree=null;
		ASTNode KW_REWRITE126_tree=null;
		RewriteRuleTokenStream stream_KW_DISABLE=new RewriteRuleTokenStream(adaptor,"token KW_DISABLE");
		RewriteRuleTokenStream stream_KW_REWRITE=new RewriteRuleTokenStream(adaptor,"token KW_REWRITE");

		 pushMsg("rewrite disabled clause", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:994:5: ( KW_DISABLE KW_REWRITE -> ^( TOK_REWRITE_DISABLED ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:994:7: KW_DISABLE KW_REWRITE
			{
			KW_DISABLE125=(Token)match(input,KW_DISABLE,FOLLOW_KW_DISABLE_in_rewriteDisabled3101); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DISABLE.add(KW_DISABLE125);

			KW_REWRITE126=(Token)match(input,KW_REWRITE,FOLLOW_KW_REWRITE_in_rewriteDisabled3103); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_REWRITE.add(KW_REWRITE126);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 995:5: -> ^( TOK_REWRITE_DISABLED )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:995:8: ^( TOK_REWRITE_DISABLED )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_REWRITE_DISABLED, "TOK_REWRITE_DISABLED"), root_1);
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// actions that must run before leaving the rule would go here (none generated)
		}
		return retval;
	}
	// $ANTLR end "rewriteDisabled"


	public static class storedAsDirs_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "storedAsDirs"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:998:1: storedAsDirs : KW_STORED KW_AS KW_DIRECTORIES -> ^( TOK_STOREDASDIRS ) ;
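	// Illustrative note (not part of the ANTLR output; the example is an assumption based
	// on Hive's list-bucketing syntax): this rule consumes "STORED AS DIRECTORIES", e.g. in
	// "CREATE TABLE t (c INT) SKEWED BY (c) ON (1) STORED AS DIRECTORIES", producing a
	// TOK_STOREDASDIRS node.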
	public final HiveParser.storedAsDirs_return storedAsDirs() throws RecognitionException {
		HiveParser.storedAsDirs_return retval = new HiveParser.storedAsDirs_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_STORED127=null;
		Token KW_AS128=null;
		Token KW_DIRECTORIES129=null;

		ASTNode KW_STORED127_tree=null;
		ASTNode KW_AS128_tree=null;
		ASTNode KW_DIRECTORIES129_tree=null;
		RewriteRuleTokenStream stream_KW_DIRECTORIES=new RewriteRuleTokenStream(adaptor,"token KW_DIRECTORIES");
		RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
		RewriteRuleTokenStream stream_KW_STORED=new RewriteRuleTokenStream(adaptor,"token KW_STORED");

		 pushMsg("stored as directories", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1001:5: ( KW_STORED KW_AS KW_DIRECTORIES -> ^( TOK_STOREDASDIRS ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1001:7: KW_STORED KW_AS KW_DIRECTORIES
			{
			KW_STORED127=(Token)match(input,KW_STORED,FOLLOW_KW_STORED_in_storedAsDirs3140); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_STORED.add(KW_STORED127);

			KW_AS128=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_storedAsDirs3142); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_AS.add(KW_AS128);

			KW_DIRECTORIES129=(Token)match(input,KW_DIRECTORIES,FOLLOW_KW_DIRECTORIES_in_storedAsDirs3144); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DIRECTORIES.add(KW_DIRECTORIES129);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1002:5: -> ^( TOK_STOREDASDIRS )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1002:8: ^( TOK_STOREDASDIRS )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_STOREDASDIRS, "TOK_STOREDASDIRS"), root_1);
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// actions that must run before leaving the rule would go here (none generated)
		}
		return retval;
	}
	// $ANTLR end "storedAsDirs"


	public static class orReplace_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "orReplace"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1005:1: orReplace : KW_OR KW_REPLACE -> ^( TOK_ORREPLACE ) ;
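	// Illustrative note (not part of the ANTLR output): this rule consumes "OR REPLACE",
	// e.g. in "CREATE OR REPLACE VIEW v AS SELECT 1", producing a TOK_ORREPLACE node.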
	public final HiveParser.orReplace_return orReplace() throws RecognitionException {
		HiveParser.orReplace_return retval = new HiveParser.orReplace_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_OR130=null;
		Token KW_REPLACE131=null;

		ASTNode KW_OR130_tree=null;
		ASTNode KW_REPLACE131_tree=null;
		RewriteRuleTokenStream stream_KW_REPLACE=new RewriteRuleTokenStream(adaptor,"token KW_REPLACE");
		RewriteRuleTokenStream stream_KW_OR=new RewriteRuleTokenStream(adaptor,"token KW_OR");

		 pushMsg("or replace clause", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1008:5: ( KW_OR KW_REPLACE -> ^( TOK_ORREPLACE ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1008:7: KW_OR KW_REPLACE
			{
			KW_OR130=(Token)match(input,KW_OR,FOLLOW_KW_OR_in_orReplace3181); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_OR.add(KW_OR130);

			KW_REPLACE131=(Token)match(input,KW_REPLACE,FOLLOW_KW_REPLACE_in_orReplace3183); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_REPLACE.add(KW_REPLACE131);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1009:5: -> ^( TOK_ORREPLACE )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1009:8: ^( TOK_ORREPLACE )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ORREPLACE, "TOK_ORREPLACE"), root_1);
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// actions that must run before leaving the rule would go here (none generated)
		}
		return retval;
	}
	// $ANTLR end "orReplace"


	public static class createDatabaseStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "createDatabaseStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1012:1: createDatabaseStatement : KW_CREATE ( KW_DATABASE | KW_SCHEMA ) ( ifNotExists )? name= identifier ( databaseComment )? ( dbLocation )? ( KW_WITH KW_DBPROPERTIES dbprops= dbProperties )? -> ^( TOK_CREATEDATABASE $name ( ifNotExists )? ( dbLocation )? ( databaseComment )? ( $dbprops)? ) ;
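	// Illustrative note (not part of the ANTLR output; the example HiveQL is an assumption):
	// this rule parses statements such as
	//   CREATE DATABASE IF NOT EXISTS d COMMENT 'demo' LOCATION '/warehouse/d'
	//   WITH DBPROPERTIES ('owner'='me')
	// Note that the -> rewrite reorders the optional children: under TOK_CREATEDATABASE the
	// dbLocation subtree precedes databaseComment, even though COMMENT is parsed before
	// LOCATION in the source text.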
	public final HiveParser.createDatabaseStatement_return createDatabaseStatement() throws RecognitionException {
		HiveParser.createDatabaseStatement_return retval = new HiveParser.createDatabaseStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_CREATE132=null;
		Token KW_DATABASE133=null;
		Token KW_SCHEMA134=null;
		Token KW_WITH138=null;
		Token KW_DBPROPERTIES139=null;
		ParserRuleReturnScope name =null;
		ParserRuleReturnScope dbprops =null;
		ParserRuleReturnScope ifNotExists135 =null;
		ParserRuleReturnScope databaseComment136 =null;
		ParserRuleReturnScope dbLocation137 =null;

		ASTNode KW_CREATE132_tree=null;
		ASTNode KW_DATABASE133_tree=null;
		ASTNode KW_SCHEMA134_tree=null;
		ASTNode KW_WITH138_tree=null;
		ASTNode KW_DBPROPERTIES139_tree=null;
		RewriteRuleTokenStream stream_KW_DBPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_DBPROPERTIES");
		RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
		RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
		RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
		RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_dbProperties=new RewriteRuleSubtreeStream(adaptor,"rule dbProperties");
		RewriteRuleSubtreeStream stream_databaseComment=new RewriteRuleSubtreeStream(adaptor,"rule databaseComment");
		RewriteRuleSubtreeStream stream_ifNotExists=new RewriteRuleSubtreeStream(adaptor,"rule ifNotExists");
		RewriteRuleSubtreeStream stream_dbLocation=new RewriteRuleSubtreeStream(adaptor,"rule dbLocation");

		 pushMsg("create database statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1015:5: ( KW_CREATE ( KW_DATABASE | KW_SCHEMA ) ( ifNotExists )? name= identifier ( databaseComment )? ( dbLocation )? ( KW_WITH KW_DBPROPERTIES dbprops= dbProperties )? -> ^( TOK_CREATEDATABASE $name ( ifNotExists )? ( dbLocation )? ( databaseComment )? ( $dbprops)? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1015:7: KW_CREATE ( KW_DATABASE | KW_SCHEMA ) ( ifNotExists )? name= identifier ( databaseComment )? ( dbLocation )? ( KW_WITH KW_DBPROPERTIES dbprops= dbProperties )?
			{
			KW_CREATE132=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_createDatabaseStatement3220); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_CREATE.add(KW_CREATE132);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1015:17: ( KW_DATABASE | KW_SCHEMA )
			int alt30=2;
			int LA30_0 = input.LA(1);
			if ( (LA30_0==KW_DATABASE) ) {
				alt30=1;
			}
			else if ( (LA30_0==KW_SCHEMA) ) {
				alt30=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 30, 0, input);
				throw nvae;
			}

			switch (alt30) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1015:18: KW_DATABASE
					{
					KW_DATABASE133=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_createDatabaseStatement3223); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DATABASE.add(KW_DATABASE133);

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1015:30: KW_SCHEMA
					{
					KW_SCHEMA134=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_createDatabaseStatement3225); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SCHEMA.add(KW_SCHEMA134);

					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1016:9: ( ifNotExists )?
			int alt31=2;
			int LA31_0 = input.LA(1);
			if ( (LA31_0==KW_IF) ) {
				alt31=1;
			}
			switch (alt31) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1016:9: ifNotExists
					{
					pushFollow(FOLLOW_ifNotExists_in_createDatabaseStatement3236);
					ifNotExists135=ifNotExists();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_ifNotExists.add(ifNotExists135.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_identifier_in_createDatabaseStatement3249);
			name=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(name.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1018:9: ( databaseComment )?
			int alt32=2;
			int LA32_0 = input.LA(1);
			if ( (LA32_0==KW_COMMENT) ) {
				alt32=1;
			}
			switch (alt32) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1018:9: databaseComment
					{
					pushFollow(FOLLOW_databaseComment_in_createDatabaseStatement3259);
					databaseComment136=databaseComment();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_databaseComment.add(databaseComment136.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1019:9: ( dbLocation )?
			int alt33=2;
			int LA33_0 = input.LA(1);
			if ( (LA33_0==KW_LOCATION) ) {
				alt33=1;
			}
			switch (alt33) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1019:9: dbLocation
					{
					pushFollow(FOLLOW_dbLocation_in_createDatabaseStatement3270);
					dbLocation137=dbLocation();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_dbLocation.add(dbLocation137.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1020:9: ( KW_WITH KW_DBPROPERTIES dbprops= dbProperties )?
			int alt34=2;
			int LA34_0 = input.LA(1);
			if ( (LA34_0==KW_WITH) ) {
				alt34=1;
			}
			switch (alt34) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1020:10: KW_WITH KW_DBPROPERTIES dbprops= dbProperties
					{
					KW_WITH138=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_createDatabaseStatement3282); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_WITH.add(KW_WITH138);

					KW_DBPROPERTIES139=(Token)match(input,KW_DBPROPERTIES,FOLLOW_KW_DBPROPERTIES_in_createDatabaseStatement3284); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DBPROPERTIES.add(KW_DBPROPERTIES139);

					pushFollow(FOLLOW_dbProperties_in_createDatabaseStatement3288);
					dbprops=dbProperties();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_dbProperties.add(dbprops.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: name, dbLocation, dbprops, ifNotExists, databaseComment
			// token labels: 
			// rule labels: name, dbprops, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_name=new RewriteRuleSubtreeStream(adaptor,"rule name",name!=null?name.getTree():null);
			RewriteRuleSubtreeStream stream_dbprops=new RewriteRuleSubtreeStream(adaptor,"rule dbprops",dbprops!=null?dbprops.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1021:5: -> ^( TOK_CREATEDATABASE $name ( ifNotExists )? ( dbLocation )? ( databaseComment )? ( $dbprops)? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1021:8: ^( TOK_CREATEDATABASE $name ( ifNotExists )? ( dbLocation )? ( databaseComment )? ( $dbprops)? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CREATEDATABASE, "TOK_CREATEDATABASE"), root_1);
				adaptor.addChild(root_1, stream_name.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1021:35: ( ifNotExists )?
				if ( stream_ifNotExists.hasNext() ) {
					adaptor.addChild(root_1, stream_ifNotExists.nextTree());
				}
				stream_ifNotExists.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1021:48: ( dbLocation )?
				if ( stream_dbLocation.hasNext() ) {
					adaptor.addChild(root_1, stream_dbLocation.nextTree());
				}
				stream_dbLocation.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1021:60: ( databaseComment )?
				if ( stream_databaseComment.hasNext() ) {
					adaptor.addChild(root_1, stream_databaseComment.nextTree());
				}
				stream_databaseComment.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1021:78: ( $dbprops)?
				if ( stream_dbprops.hasNext() ) {
					adaptor.addChild(root_1, stream_dbprops.nextTree());
				}
				stream_dbprops.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// actions that must run before leaving the rule would go here (none generated)
		}
		return retval;
	}
	// $ANTLR end "createDatabaseStatement"


	public static class dbLocation_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "dbLocation"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1024:1: dbLocation : KW_LOCATION locn= StringLiteral -> ^( TOK_DATABASELOCATION $locn) ;
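	// Illustrative note (not part of the ANTLR output): this rule matches
	// "LOCATION '<path>'", e.g. "LOCATION '/warehouse/d'", wrapping the StringLiteral
	// token in a TOK_DATABASELOCATION node.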
	public final HiveParser.dbLocation_return dbLocation() throws RecognitionException {
		HiveParser.dbLocation_return retval = new HiveParser.dbLocation_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token locn=null;
		Token KW_LOCATION140=null;

		ASTNode locn_tree=null;
		ASTNode KW_LOCATION140_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_LOCATION=new RewriteRuleTokenStream(adaptor,"token KW_LOCATION");

		 pushMsg("database location specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1027:5: ( KW_LOCATION locn= StringLiteral -> ^( TOK_DATABASELOCATION $locn) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1028:7: KW_LOCATION locn= StringLiteral
			{
			KW_LOCATION140=(Token)match(input,KW_LOCATION,FOLLOW_KW_LOCATION_in_dbLocation3349); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_LOCATION.add(KW_LOCATION140);

			locn=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_dbLocation3353); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(locn);

			// AST REWRITE
			// elements: locn
			// token labels: locn
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_locn=new RewriteRuleTokenStream(adaptor,"token locn",locn);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1028:38: -> ^( TOK_DATABASELOCATION $locn)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1028:41: ^( TOK_DATABASELOCATION $locn)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DATABASELOCATION, "TOK_DATABASELOCATION"), root_1);
				adaptor.addChild(root_1, stream_locn.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// actions that must run before leaving the rule would go here (none generated)
		}
		return retval;
	}
	// $ANTLR end "dbLocation"


	public static class dbProperties_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "dbProperties"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1031:1: dbProperties : LPAREN dbPropertiesList RPAREN -> ^( TOK_DATABASEPROPERTIES dbPropertiesList ) ;
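	// Illustrative note (not part of the ANTLR output): this rule matches a parenthesized
	// property list, e.g. "('owner'='me', 'created'='2018')"; the parentheses are dropped
	// and the list is wrapped in a TOK_DATABASEPROPERTIES node.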
	public final HiveParser.dbProperties_return dbProperties() throws RecognitionException {
		HiveParser.dbProperties_return retval = new HiveParser.dbProperties_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token LPAREN141=null;
		Token RPAREN143=null;
		ParserRuleReturnScope dbPropertiesList142 =null;

		ASTNode LPAREN141_tree=null;
		ASTNode RPAREN143_tree=null;
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleSubtreeStream stream_dbPropertiesList=new RewriteRuleSubtreeStream(adaptor,"rule dbPropertiesList");

		 pushMsg("dbproperties", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1034:5: ( LPAREN dbPropertiesList RPAREN -> ^( TOK_DATABASEPROPERTIES dbPropertiesList ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1035:7: LPAREN dbPropertiesList RPAREN
			{
			LPAREN141=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_dbProperties3395); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN141);

			pushFollow(FOLLOW_dbPropertiesList_in_dbProperties3397);
			dbPropertiesList142=dbPropertiesList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_dbPropertiesList.add(dbPropertiesList142.getTree());
			RPAREN143=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_dbProperties3399); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN143);

			// AST REWRITE
			// elements: dbPropertiesList
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1035:38: -> ^( TOK_DATABASEPROPERTIES dbPropertiesList )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1035:41: ^( TOK_DATABASEPROPERTIES dbPropertiesList )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DATABASEPROPERTIES, "TOK_DATABASEPROPERTIES"), root_1);
				adaptor.addChild(root_1, stream_dbPropertiesList.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// actions that must run before leaving the rule would go here (none generated)
		}
		return retval;
	}
	// $ANTLR end "dbProperties"


	public static class dbPropertiesList_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "dbPropertiesList"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1038:1: dbPropertiesList : keyValueProperty ( COMMA keyValueProperty )* -> ^( TOK_DBPROPLIST ( keyValueProperty )+ ) ;
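	// Illustrative note (not part of the ANTLR output): this rule matches one or more
	// comma-separated keyValueProperty entries; the COMMA tokens are discarded and the
	// property subtrees are collected as children of a single TOK_DBPROPLIST node.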
	public final HiveParser.dbPropertiesList_return dbPropertiesList() throws RecognitionException {
		HiveParser.dbPropertiesList_return retval = new HiveParser.dbPropertiesList_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token COMMA145=null;
		ParserRuleReturnScope keyValueProperty144 =null;
		ParserRuleReturnScope keyValueProperty146 =null;

		ASTNode COMMA145_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleSubtreeStream stream_keyValueProperty=new RewriteRuleSubtreeStream(adaptor,"rule keyValueProperty");

		 pushMsg("database properties list", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1041:5: ( keyValueProperty ( COMMA keyValueProperty )* -> ^( TOK_DBPROPLIST ( keyValueProperty )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1042:7: keyValueProperty ( COMMA keyValueProperty )*
			{
			pushFollow(FOLLOW_keyValueProperty_in_dbPropertiesList3440);
			keyValueProperty144=keyValueProperty();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_keyValueProperty.add(keyValueProperty144.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1042:24: ( COMMA keyValueProperty )*
			loop35:
			while (true) {
				int alt35=2;
				int LA35_0 = input.LA(1);
				if ( (LA35_0==COMMA) ) {
					alt35=1;
				}

				switch (alt35) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1042:25: COMMA keyValueProperty
					{
					COMMA145=(Token)match(input,COMMA,FOLLOW_COMMA_in_dbPropertiesList3443); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA145);

					pushFollow(FOLLOW_keyValueProperty_in_dbPropertiesList3445);
					keyValueProperty146=keyValueProperty();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_keyValueProperty.add(keyValueProperty146.getTree());
					}
					break;

				default :
					break loop35;
				}
			}

			// AST REWRITE
			// elements: keyValueProperty
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1042:50: -> ^( TOK_DBPROPLIST ( keyValueProperty )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1042:53: ^( TOK_DBPROPLIST ( keyValueProperty )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DBPROPLIST, "TOK_DBPROPLIST"), root_1);
				if ( !(stream_keyValueProperty.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_keyValueProperty.hasNext() ) {
					adaptor.addChild(root_1, stream_keyValueProperty.nextTree());
				}
				stream_keyValueProperty.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "dbPropertiesList"


	public static class switchDatabaseStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "switchDatabaseStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1046:1: switchDatabaseStatement : KW_USE identifier -> ^( TOK_SWITCHDATABASE identifier ) ;
	public final HiveParser.switchDatabaseStatement_return switchDatabaseStatement() throws RecognitionException {
		HiveParser.switchDatabaseStatement_return retval = new HiveParser.switchDatabaseStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_USE147=null;
		ParserRuleReturnScope identifier148 =null;

		ASTNode KW_USE147_tree=null;
		RewriteRuleTokenStream stream_KW_USE=new RewriteRuleTokenStream(adaptor,"token KW_USE");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");

		 pushMsg("switch database statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1049:5: ( KW_USE identifier -> ^( TOK_SWITCHDATABASE identifier ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1049:7: KW_USE identifier
			{
			KW_USE147=(Token)match(input,KW_USE,FOLLOW_KW_USE_in_switchDatabaseStatement3484); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_USE.add(KW_USE147);

			pushFollow(FOLLOW_identifier_in_switchDatabaseStatement3486);
			identifier148=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(identifier148.getTree());
			// AST REWRITE
			// elements: identifier
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1050:5: -> ^( TOK_SWITCHDATABASE identifier )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1050:8: ^( TOK_SWITCHDATABASE identifier )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SWITCHDATABASE, "TOK_SWITCHDATABASE"), root_1);
				adaptor.addChild(root_1, stream_identifier.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "switchDatabaseStatement"


	public static class dropDatabaseStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "dropDatabaseStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1053:1: dropDatabaseStatement : KW_DROP ( KW_DATABASE | KW_SCHEMA ) ( ifExists )? identifier ( restrictOrCascade )? -> ^( TOK_DROPDATABASE identifier ( ifExists )? ( restrictOrCascade )? ) ;
	public final HiveParser.dropDatabaseStatement_return dropDatabaseStatement() throws RecognitionException {
		HiveParser.dropDatabaseStatement_return retval = new HiveParser.dropDatabaseStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_DROP149=null;
		Token KW_DATABASE150=null;
		Token KW_SCHEMA151=null;
		ParserRuleReturnScope ifExists152 =null;
		ParserRuleReturnScope identifier153 =null;
		ParserRuleReturnScope restrictOrCascade154 =null;

		ASTNode KW_DROP149_tree=null;
		ASTNode KW_DATABASE150_tree=null;
		ASTNode KW_SCHEMA151_tree=null;
		RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
		RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
		RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");
		RewriteRuleSubtreeStream stream_restrictOrCascade=new RewriteRuleSubtreeStream(adaptor,"rule restrictOrCascade");

		 pushMsg("drop database statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1056:5: ( KW_DROP ( KW_DATABASE | KW_SCHEMA ) ( ifExists )? identifier ( restrictOrCascade )? -> ^( TOK_DROPDATABASE identifier ( ifExists )? ( restrictOrCascade )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1056:7: KW_DROP ( KW_DATABASE | KW_SCHEMA ) ( ifExists )? identifier ( restrictOrCascade )?
			{
			KW_DROP149=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_dropDatabaseStatement3525); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DROP.add(KW_DROP149);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1056:15: ( KW_DATABASE | KW_SCHEMA )
			int alt36=2;
			int LA36_0 = input.LA(1);
			if ( (LA36_0==KW_DATABASE) ) {
				alt36=1;
			}
			else if ( (LA36_0==KW_SCHEMA) ) {
				alt36=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 36, 0, input);
				throw nvae;
			}

			switch (alt36) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1056:16: KW_DATABASE
					{
					KW_DATABASE150=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_dropDatabaseStatement3528); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DATABASE.add(KW_DATABASE150);

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1056:28: KW_SCHEMA
					{
					KW_SCHEMA151=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_dropDatabaseStatement3530); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SCHEMA.add(KW_SCHEMA151);

					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1056:39: ( ifExists )?
			int alt37=2;
			int LA37_0 = input.LA(1);
			if ( (LA37_0==KW_IF) ) {
				alt37=1;
			}
			switch (alt37) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1056:39: ifExists
					{
					pushFollow(FOLLOW_ifExists_in_dropDatabaseStatement3533);
					ifExists152=ifExists();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_ifExists.add(ifExists152.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_identifier_in_dropDatabaseStatement3536);
			identifier153=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(identifier153.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1056:60: ( restrictOrCascade )?
			int alt38=2;
			int LA38_0 = input.LA(1);
			if ( (LA38_0==KW_CASCADE||LA38_0==KW_RESTRICT) ) {
				alt38=1;
			}
			switch (alt38) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1056:60: restrictOrCascade
					{
					pushFollow(FOLLOW_restrictOrCascade_in_dropDatabaseStatement3538);
					restrictOrCascade154=restrictOrCascade();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_restrictOrCascade.add(restrictOrCascade154.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: restrictOrCascade, ifExists, identifier
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1057:5: -> ^( TOK_DROPDATABASE identifier ( ifExists )? ( restrictOrCascade )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1057:8: ^( TOK_DROPDATABASE identifier ( ifExists )? ( restrictOrCascade )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DROPDATABASE, "TOK_DROPDATABASE"), root_1);
				adaptor.addChild(root_1, stream_identifier.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1057:38: ( ifExists )?
				if ( stream_ifExists.hasNext() ) {
					adaptor.addChild(root_1, stream_ifExists.nextTree());
				}
				stream_ifExists.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1057:48: ( restrictOrCascade )?
				if ( stream_restrictOrCascade.hasNext() ) {
					adaptor.addChild(root_1, stream_restrictOrCascade.nextTree());
				}
				stream_restrictOrCascade.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "dropDatabaseStatement"


	public static class databaseComment_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "databaseComment"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1060:1: databaseComment : KW_COMMENT comment= StringLiteral -> ^( TOK_DATABASECOMMENT $comment) ;
	public final HiveParser.databaseComment_return databaseComment() throws RecognitionException {
		HiveParser.databaseComment_return retval = new HiveParser.databaseComment_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token comment=null;
		Token KW_COMMENT155=null;

		ASTNode comment_tree=null;
		ASTNode KW_COMMENT155_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");

		 pushMsg("database's comment", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1063:5: ( KW_COMMENT comment= StringLiteral -> ^( TOK_DATABASECOMMENT $comment) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1063:7: KW_COMMENT comment= StringLiteral
			{
			KW_COMMENT155=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_databaseComment3584); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_COMMENT.add(KW_COMMENT155);

			comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_databaseComment3588); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(comment);

			// AST REWRITE
			// elements: comment
			// token labels: comment
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1064:5: -> ^( TOK_DATABASECOMMENT $comment)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1064:8: ^( TOK_DATABASECOMMENT $comment)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DATABASECOMMENT, "TOK_DATABASECOMMENT"), root_1);
				adaptor.addChild(root_1, stream_comment.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "databaseComment"


	public static class createTableStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "createTableStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1067:1: createTableStatement : KW_CREATE (temp= KW_TEMPORARY )? (ext= KW_EXTERNAL )? KW_TABLE ( ifNotExists )? name= tableName (like= KW_LIKE likeName= tableName ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? | ( LPAREN columnNameTypeOrConstraintList RPAREN )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( KW_AS selectStatementWithCTE )? ) -> ^( TOK_CREATETABLE $name ( $temp)? ( $ext)? ( ifNotExists )? ^( TOK_LIKETABLE ( $likeName)? ) ( columnNameTypeOrConstraintList )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( selectStatementWithCTE )? ) ;
	public final HiveParser.createTableStatement_return createTableStatement() throws RecognitionException {
		HiveParser.createTableStatement_return retval = new HiveParser.createTableStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token temp=null;
		Token ext=null;
		Token like=null;
		Token KW_CREATE156=null;
		Token KW_TABLE157=null;
		Token LPAREN163=null;
		Token RPAREN165=null;
		Token KW_AS174=null;
		ParserRuleReturnScope name =null;
		ParserRuleReturnScope likeName =null;
		ParserRuleReturnScope ifNotExists158 =null;
		ParserRuleReturnScope tableRowFormat159 =null;
		ParserRuleReturnScope tableFileFormat160 =null;
		ParserRuleReturnScope tableLocation161 =null;
		ParserRuleReturnScope tablePropertiesPrefixed162 =null;
		ParserRuleReturnScope columnNameTypeOrConstraintList164 =null;
		ParserRuleReturnScope tableComment166 =null;
		ParserRuleReturnScope tablePartition167 =null;
		ParserRuleReturnScope tableBuckets168 =null;
		ParserRuleReturnScope tableSkewed169 =null;
		ParserRuleReturnScope tableRowFormat170 =null;
		ParserRuleReturnScope tableFileFormat171 =null;
		ParserRuleReturnScope tableLocation172 =null;
		ParserRuleReturnScope tablePropertiesPrefixed173 =null;
		ParserRuleReturnScope selectStatementWithCTE175 =null;

		ASTNode temp_tree=null;
		ASTNode ext_tree=null;
		ASTNode like_tree=null;
		ASTNode KW_CREATE156_tree=null;
		ASTNode KW_TABLE157_tree=null;
		ASTNode LPAREN163_tree=null;
		ASTNode RPAREN165_tree=null;
		ASTNode KW_AS174_tree=null;
		RewriteRuleTokenStream stream_KW_TEMPORARY=new RewriteRuleTokenStream(adaptor,"token KW_TEMPORARY");
		RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
		RewriteRuleTokenStream stream_KW_EXTERNAL=new RewriteRuleTokenStream(adaptor,"token KW_EXTERNAL");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
		RewriteRuleTokenStream stream_KW_LIKE=new RewriteRuleTokenStream(adaptor,"token KW_LIKE");
		RewriteRuleSubtreeStream stream_tableRowFormat=new RewriteRuleSubtreeStream(adaptor,"rule tableRowFormat");
		RewriteRuleSubtreeStream stream_selectStatementWithCTE=new RewriteRuleSubtreeStream(adaptor,"rule selectStatementWithCTE");
		RewriteRuleSubtreeStream stream_tableLocation=new RewriteRuleSubtreeStream(adaptor,"rule tableLocation");
		RewriteRuleSubtreeStream stream_columnNameTypeOrConstraintList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameTypeOrConstraintList");
		RewriteRuleSubtreeStream stream_tableSkewed=new RewriteRuleSubtreeStream(adaptor,"rule tableSkewed");
		RewriteRuleSubtreeStream stream_tablePropertiesPrefixed=new RewriteRuleSubtreeStream(adaptor,"rule tablePropertiesPrefixed");
		RewriteRuleSubtreeStream stream_ifNotExists=new RewriteRuleSubtreeStream(adaptor,"rule ifNotExists");
		RewriteRuleSubtreeStream stream_tableFileFormat=new RewriteRuleSubtreeStream(adaptor,"rule tableFileFormat");
		RewriteRuleSubtreeStream stream_tableComment=new RewriteRuleSubtreeStream(adaptor,"rule tableComment");
		RewriteRuleSubtreeStream stream_tablePartition=new RewriteRuleSubtreeStream(adaptor,"rule tablePartition");
		RewriteRuleSubtreeStream stream_tableBuckets=new RewriteRuleSubtreeStream(adaptor,"rule tableBuckets");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		 pushMsg("create table statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1070:5: ( KW_CREATE (temp= KW_TEMPORARY )? (ext= KW_EXTERNAL )? KW_TABLE ( ifNotExists )? name= tableName (like= KW_LIKE likeName= tableName ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? | ( LPAREN columnNameTypeOrConstraintList RPAREN )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( KW_AS selectStatementWithCTE )? ) -> ^( TOK_CREATETABLE $name ( $temp)? ( $ext)? ( ifNotExists )? ^( TOK_LIKETABLE ( $likeName)? ) ( columnNameTypeOrConstraintList )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( selectStatementWithCTE )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1070:7: KW_CREATE (temp= KW_TEMPORARY )? (ext= KW_EXTERNAL )? KW_TABLE ( ifNotExists )? name= tableName (like= KW_LIKE likeName= tableName ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? | ( LPAREN columnNameTypeOrConstraintList RPAREN )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( KW_AS selectStatementWithCTE )? )
			{
			KW_CREATE156=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_createTableStatement3628); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_CREATE.add(KW_CREATE156);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1070:17: (temp= KW_TEMPORARY )?
			int alt39=2;
			int LA39_0 = input.LA(1);
			if ( (LA39_0==KW_TEMPORARY) ) {
				alt39=1;
			}
			switch (alt39) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1070:18: temp= KW_TEMPORARY
					{
					temp=(Token)match(input,KW_TEMPORARY,FOLLOW_KW_TEMPORARY_in_createTableStatement3633); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TEMPORARY.add(temp);

					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1070:38: (ext= KW_EXTERNAL )?
			int alt40=2;
			int LA40_0 = input.LA(1);
			if ( (LA40_0==KW_EXTERNAL) ) {
				alt40=1;
			}
			switch (alt40) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1070:39: ext= KW_EXTERNAL
					{
					ext=(Token)match(input,KW_EXTERNAL,FOLLOW_KW_EXTERNAL_in_createTableStatement3640); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_EXTERNAL.add(ext);

					}
					break;

			}

			KW_TABLE157=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_createTableStatement3644); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE157);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1070:66: ( ifNotExists )?
			int alt41=2;
			int LA41_0 = input.LA(1);
			if ( (LA41_0==KW_IF) ) {
				alt41=1;
			}
			switch (alt41) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1070:66: ifNotExists
					{
					pushFollow(FOLLOW_ifNotExists_in_createTableStatement3646);
					ifNotExists158=ifNotExists();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_ifNotExists.add(ifNotExists158.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_tableName_in_createTableStatement3651);
			name=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(name.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1071:7: (like= KW_LIKE likeName= tableName ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? | ( LPAREN columnNameTypeOrConstraintList RPAREN )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( KW_AS selectStatementWithCTE )? )
			int alt56=2;
			int LA56_0 = input.LA(1);
			if ( (LA56_0==KW_LIKE) ) {
				alt56=1;
			}
			else if ( (LA56_0==EOF||LA56_0==KW_AS||LA56_0==KW_CLUSTERED||LA56_0==KW_COMMENT||LA56_0==KW_LOCATION||LA56_0==KW_PARTITIONED||LA56_0==KW_ROW||LA56_0==KW_SKEWED||LA56_0==KW_STORED||LA56_0==KW_TBLPROPERTIES||LA56_0==LPAREN) ) {
				alt56=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 56, 0, input);
				throw nvae;
			}

			switch (alt56) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1071:10: like= KW_LIKE likeName= tableName ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )?
					{
					like=(Token)match(input,KW_LIKE,FOLLOW_KW_LIKE_in_createTableStatement3664); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_LIKE.add(like);

					pushFollow(FOLLOW_tableName_in_createTableStatement3668);
					likeName=tableName();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableName.add(likeName.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1072:10: ( tableRowFormat )?
					int alt42=2;
					int LA42_0 = input.LA(1);
					if ( (LA42_0==KW_ROW) ) {
						alt42=1;
					}
					switch (alt42) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1072:10: tableRowFormat
							{
							pushFollow(FOLLOW_tableRowFormat_in_createTableStatement3679);
							tableRowFormat159=tableRowFormat();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tableRowFormat.add(tableRowFormat159.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1073:10: ( tableFileFormat )?
					int alt43=2;
					int LA43_0 = input.LA(1);
					if ( (LA43_0==KW_STORED) ) {
						alt43=1;
					}
					switch (alt43) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1073:10: tableFileFormat
							{
							pushFollow(FOLLOW_tableFileFormat_in_createTableStatement3691);
							tableFileFormat160=tableFileFormat();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tableFileFormat.add(tableFileFormat160.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1074:10: ( tableLocation )?
					int alt44=2;
					int LA44_0 = input.LA(1);
					if ( (LA44_0==KW_LOCATION) ) {
						alt44=1;
					}
					switch (alt44) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1074:10: tableLocation
							{
							pushFollow(FOLLOW_tableLocation_in_createTableStatement3703);
							tableLocation161=tableLocation();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tableLocation.add(tableLocation161.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1075:10: ( tablePropertiesPrefixed )?
					int alt45=2;
					int LA45_0 = input.LA(1);
					if ( (LA45_0==KW_TBLPROPERTIES) ) {
						alt45=1;
					}
					switch (alt45) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1075:10: tablePropertiesPrefixed
							{
							pushFollow(FOLLOW_tablePropertiesPrefixed_in_createTableStatement3715);
							tablePropertiesPrefixed162=tablePropertiesPrefixed();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tablePropertiesPrefixed.add(tablePropertiesPrefixed162.getTree());
							}
							break;

					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1076:10: ( LPAREN columnNameTypeOrConstraintList RPAREN )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( KW_AS selectStatementWithCTE )?
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1076:10: ( LPAREN columnNameTypeOrConstraintList RPAREN )?
					int alt46=2;
					int LA46_0 = input.LA(1);
					if ( (LA46_0==LPAREN) ) {
						alt46=1;
					}
					switch (alt46) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1076:11: LPAREN columnNameTypeOrConstraintList RPAREN
							{
							LPAREN163=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_createTableStatement3728); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN163);

							pushFollow(FOLLOW_columnNameTypeOrConstraintList_in_createTableStatement3730);
							columnNameTypeOrConstraintList164=columnNameTypeOrConstraintList();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_columnNameTypeOrConstraintList.add(columnNameTypeOrConstraintList164.getTree());
							RPAREN165=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_createTableStatement3732); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN165);

							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1077:10: ( tableComment )?
					int alt47=2;
					int LA47_0 = input.LA(1);
					if ( (LA47_0==KW_COMMENT) ) {
						alt47=1;
					}
					switch (alt47) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1077:10: tableComment
							{
							pushFollow(FOLLOW_tableComment_in_createTableStatement3745);
							tableComment166=tableComment();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tableComment.add(tableComment166.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1078:10: ( tablePartition )?
					int alt48=2;
					int LA48_0 = input.LA(1);
					if ( (LA48_0==KW_PARTITIONED) ) {
						alt48=1;
					}
					switch (alt48) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1078:10: tablePartition
							{
							pushFollow(FOLLOW_tablePartition_in_createTableStatement3757);
							tablePartition167=tablePartition();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tablePartition.add(tablePartition167.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1079:10: ( tableBuckets )?
					int alt49=2;
					int LA49_0 = input.LA(1);
					if ( (LA49_0==KW_CLUSTERED) ) {
						alt49=1;
					}
					switch (alt49) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1079:10: tableBuckets
							{
							pushFollow(FOLLOW_tableBuckets_in_createTableStatement3769);
							tableBuckets168=tableBuckets();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tableBuckets.add(tableBuckets168.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1080:10: ( tableSkewed )?
					int alt50=2;
					int LA50_0 = input.LA(1);
					if ( (LA50_0==KW_SKEWED) ) {
						alt50=1;
					}
					switch (alt50) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1080:10: tableSkewed
							{
							pushFollow(FOLLOW_tableSkewed_in_createTableStatement3781);
							tableSkewed169=tableSkewed();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tableSkewed.add(tableSkewed169.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1081:10: ( tableRowFormat )?
					int alt51=2;
					int LA51_0 = input.LA(1);
					if ( (LA51_0==KW_ROW) ) {
						alt51=1;
					}
					switch (alt51) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1081:10: tableRowFormat
							{
							pushFollow(FOLLOW_tableRowFormat_in_createTableStatement3793);
							tableRowFormat170=tableRowFormat();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tableRowFormat.add(tableRowFormat170.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1082:10: ( tableFileFormat )?
					int alt52=2;
					int LA52_0 = input.LA(1);
					if ( (LA52_0==KW_STORED) ) {
						alt52=1;
					}
					switch (alt52) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1082:10: tableFileFormat
							{
							pushFollow(FOLLOW_tableFileFormat_in_createTableStatement3805);
							tableFileFormat171=tableFileFormat();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tableFileFormat.add(tableFileFormat171.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1083:10: ( tableLocation )?
					int alt53=2;
					int LA53_0 = input.LA(1);
					if ( (LA53_0==KW_LOCATION) ) {
						alt53=1;
					}
					switch (alt53) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1083:10: tableLocation
							{
							pushFollow(FOLLOW_tableLocation_in_createTableStatement3817);
							tableLocation172=tableLocation();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tableLocation.add(tableLocation172.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1084:10: ( tablePropertiesPrefixed )?
					int alt54=2;
					int LA54_0 = input.LA(1);
					if ( (LA54_0==KW_TBLPROPERTIES) ) {
						alt54=1;
					}
					switch (alt54) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1084:10: tablePropertiesPrefixed
							{
							pushFollow(FOLLOW_tablePropertiesPrefixed_in_createTableStatement3829);
							tablePropertiesPrefixed173=tablePropertiesPrefixed();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tablePropertiesPrefixed.add(tablePropertiesPrefixed173.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1085:10: ( KW_AS selectStatementWithCTE )?
					int alt55=2;
					int LA55_0 = input.LA(1);
					if ( (LA55_0==KW_AS) ) {
						alt55=1;
					}
					switch (alt55) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1085:11: KW_AS selectStatementWithCTE
							{
							KW_AS174=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_createTableStatement3842); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_AS.add(KW_AS174);

							pushFollow(FOLLOW_selectStatementWithCTE_in_createTableStatement3844);
							selectStatementWithCTE175=selectStatementWithCTE();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_selectStatementWithCTE.add(selectStatementWithCTE175.getTree());
							}
							break;

					}

					}
					break;

			}

			// AST REWRITE
			// elements: tableComment, tableRowFormat, ifNotExists, tablePartition, tableFileFormat, columnNameTypeOrConstraintList, tableSkewed, ext, temp, likeName, tableLocation, tablePropertiesPrefixed, tableBuckets, name, selectStatementWithCTE
			// token labels: ext, temp
			// rule labels: likeName, name, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_ext=new RewriteRuleTokenStream(adaptor,"token ext",ext);
			RewriteRuleTokenStream stream_temp=new RewriteRuleTokenStream(adaptor,"token temp",temp);
			RewriteRuleSubtreeStream stream_likeName=new RewriteRuleSubtreeStream(adaptor,"rule likeName",likeName!=null?likeName.getTree():null);
			RewriteRuleSubtreeStream stream_name=new RewriteRuleSubtreeStream(adaptor,"rule name",name!=null?name.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1087:5: -> ^( TOK_CREATETABLE $name ( $temp)? ( $ext)? ( ifNotExists )? ^( TOK_LIKETABLE ( $likeName)? ) ( columnNameTypeOrConstraintList )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( selectStatementWithCTE )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1087:8: ^( TOK_CREATETABLE $name ( $temp)? ( $ext)? ( ifNotExists )? ^( TOK_LIKETABLE ( $likeName)? ) ( columnNameTypeOrConstraintList )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( selectStatementWithCTE )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CREATETABLE, "TOK_CREATETABLE"), root_1);
				adaptor.addChild(root_1, stream_name.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1087:33: ( $temp)?
				if ( stream_temp.hasNext() ) {
					adaptor.addChild(root_1, stream_temp.nextNode());
				}
				stream_temp.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1087:40: ( $ext)?
				if ( stream_ext.hasNext() ) {
					adaptor.addChild(root_1, stream_ext.nextNode());
				}
				stream_ext.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1087:45: ( ifNotExists )?
				if ( stream_ifNotExists.hasNext() ) {
					adaptor.addChild(root_1, stream_ifNotExists.nextTree());
				}
				stream_ifNotExists.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1088:10: ^( TOK_LIKETABLE ( $likeName)? )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_LIKETABLE, "TOK_LIKETABLE"), root_2);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1088:27: ( $likeName)?
				if ( stream_likeName.hasNext() ) {
					adaptor.addChild(root_2, stream_likeName.nextTree());
				}
				stream_likeName.reset();

				adaptor.addChild(root_1, root_2);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1089:10: ( columnNameTypeOrConstraintList )?
				if ( stream_columnNameTypeOrConstraintList.hasNext() ) {
					adaptor.addChild(root_1, stream_columnNameTypeOrConstraintList.nextTree());
				}
				stream_columnNameTypeOrConstraintList.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1090:10: ( tableComment )?
				if ( stream_tableComment.hasNext() ) {
					adaptor.addChild(root_1, stream_tableComment.nextTree());
				}
				stream_tableComment.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1091:10: ( tablePartition )?
				if ( stream_tablePartition.hasNext() ) {
					adaptor.addChild(root_1, stream_tablePartition.nextTree());
				}
				stream_tablePartition.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1092:10: ( tableBuckets )?
				if ( stream_tableBuckets.hasNext() ) {
					adaptor.addChild(root_1, stream_tableBuckets.nextTree());
				}
				stream_tableBuckets.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1093:10: ( tableSkewed )?
				if ( stream_tableSkewed.hasNext() ) {
					adaptor.addChild(root_1, stream_tableSkewed.nextTree());
				}
				stream_tableSkewed.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1094:10: ( tableRowFormat )?
				if ( stream_tableRowFormat.hasNext() ) {
					adaptor.addChild(root_1, stream_tableRowFormat.nextTree());
				}
				stream_tableRowFormat.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1095:10: ( tableFileFormat )?
				if ( stream_tableFileFormat.hasNext() ) {
					adaptor.addChild(root_1, stream_tableFileFormat.nextTree());
				}
				stream_tableFileFormat.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1096:10: ( tableLocation )?
				if ( stream_tableLocation.hasNext() ) {
					adaptor.addChild(root_1, stream_tableLocation.nextTree());
				}
				stream_tableLocation.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1097:10: ( tablePropertiesPrefixed )?
				if ( stream_tablePropertiesPrefixed.hasNext() ) {
					adaptor.addChild(root_1, stream_tablePropertiesPrefixed.nextTree());
				}
				stream_tablePropertiesPrefixed.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1098:10: ( selectStatementWithCTE )?
				if ( stream_selectStatementWithCTE.hasNext() ) {
					adaptor.addChild(root_1, stream_selectStatementWithCTE.nextTree());
				}
				stream_selectStatementWithCTE.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "createTableStatement"


	public static class truncateTableStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "truncateTableStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1102:1: truncateTableStatement : KW_TRUNCATE KW_TABLE tablePartitionPrefix ( KW_COLUMNS LPAREN columnNameList RPAREN )? -> ^( TOK_TRUNCATETABLE tablePartitionPrefix ( columnNameList )? ) ;
	public final HiveParser.truncateTableStatement_return truncateTableStatement() throws RecognitionException {
		HiveParser.truncateTableStatement_return retval = new HiveParser.truncateTableStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_TRUNCATE176=null;
		Token KW_TABLE177=null;
		Token KW_COLUMNS179=null;
		Token LPAREN180=null;
		Token RPAREN182=null;
		ParserRuleReturnScope tablePartitionPrefix178 =null;
		ParserRuleReturnScope columnNameList181 =null;

		ASTNode KW_TRUNCATE176_tree=null;
		ASTNode KW_TABLE177_tree=null;
		ASTNode KW_COLUMNS179_tree=null;
		ASTNode LPAREN180_tree=null;
		ASTNode RPAREN182_tree=null;
		RewriteRuleTokenStream stream_KW_COLUMNS=new RewriteRuleTokenStream(adaptor,"token KW_COLUMNS");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_KW_TRUNCATE=new RewriteRuleTokenStream(adaptor,"token KW_TRUNCATE");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleSubtreeStream stream_tablePartitionPrefix=new RewriteRuleSubtreeStream(adaptor,"rule tablePartitionPrefix");
		RewriteRuleSubtreeStream stream_columnNameList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameList");

		 pushMsg("truncate table statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1105:5: ( KW_TRUNCATE KW_TABLE tablePartitionPrefix ( KW_COLUMNS LPAREN columnNameList RPAREN )? -> ^( TOK_TRUNCATETABLE tablePartitionPrefix ( columnNameList )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1105:7: KW_TRUNCATE KW_TABLE tablePartitionPrefix ( KW_COLUMNS LPAREN columnNameList RPAREN )?
			{
			KW_TRUNCATE176=(Token)match(input,KW_TRUNCATE,FOLLOW_KW_TRUNCATE_in_truncateTableStatement4051); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TRUNCATE.add(KW_TRUNCATE176);

			KW_TABLE177=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_truncateTableStatement4053); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE177);

			pushFollow(FOLLOW_tablePartitionPrefix_in_truncateTableStatement4055);
			tablePartitionPrefix178=tablePartitionPrefix();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tablePartitionPrefix.add(tablePartitionPrefix178.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1105:49: ( KW_COLUMNS LPAREN columnNameList RPAREN )?
			int alt57=2;
			int LA57_0 = input.LA(1);
			if ( (LA57_0==KW_COLUMNS) ) {
				alt57=1;
			}
			switch (alt57) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1105:50: KW_COLUMNS LPAREN columnNameList RPAREN
					{
					KW_COLUMNS179=(Token)match(input,KW_COLUMNS,FOLLOW_KW_COLUMNS_in_truncateTableStatement4058); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_COLUMNS.add(KW_COLUMNS179);

					LPAREN180=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_truncateTableStatement4060); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN180);

					pushFollow(FOLLOW_columnNameList_in_truncateTableStatement4062);
					columnNameList181=columnNameList();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_columnNameList.add(columnNameList181.getTree());
					RPAREN182=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_truncateTableStatement4064); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN182);

					}
					break;

			}

			// AST REWRITE
			// elements: columnNameList, tablePartitionPrefix
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1105:92: -> ^( TOK_TRUNCATETABLE tablePartitionPrefix ( columnNameList )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1105:95: ^( TOK_TRUNCATETABLE tablePartitionPrefix ( columnNameList )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TRUNCATETABLE, "TOK_TRUNCATETABLE"), root_1);
				adaptor.addChild(root_1, stream_tablePartitionPrefix.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1105:136: ( columnNameList )?
				if ( stream_columnNameList.hasNext() ) {
					adaptor.addChild(root_1, stream_columnNameList.nextTree());
				}
				stream_columnNameList.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "truncateTableStatement"


	public static class dropTableStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "dropTableStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1107:1: dropTableStatement : KW_DROP KW_TABLE ( ifExists )? tableName ( KW_PURGE )? ( replicationClause )? -> ^( TOK_DROPTABLE tableName ( ifExists )? ( KW_PURGE )? ( replicationClause )? ) ;
	public final HiveParser.dropTableStatement_return dropTableStatement() throws RecognitionException {
		HiveParser.dropTableStatement_return retval = new HiveParser.dropTableStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_DROP183=null;
		Token KW_TABLE184=null;
		Token KW_PURGE187=null;
		ParserRuleReturnScope ifExists185 =null;
		ParserRuleReturnScope tableName186 =null;
		ParserRuleReturnScope replicationClause188 =null;

		ASTNode KW_DROP183_tree=null;
		ASTNode KW_TABLE184_tree=null;
		ASTNode KW_PURGE187_tree=null;
		RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
		RewriteRuleTokenStream stream_KW_PURGE=new RewriteRuleTokenStream(adaptor,"token KW_PURGE");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");
		RewriteRuleSubtreeStream stream_replicationClause=new RewriteRuleSubtreeStream(adaptor,"rule replicationClause");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		 pushMsg("drop statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1110:5: ( KW_DROP KW_TABLE ( ifExists )? tableName ( KW_PURGE )? ( replicationClause )? -> ^( TOK_DROPTABLE tableName ( ifExists )? ( KW_PURGE )? ( replicationClause )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1110:7: KW_DROP KW_TABLE ( ifExists )? tableName ( KW_PURGE )? ( replicationClause )?
			{
			KW_DROP183=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_dropTableStatement4099); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DROP.add(KW_DROP183);

			KW_TABLE184=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_dropTableStatement4101); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE184);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1110:24: ( ifExists )?
			int alt58=2;
			int LA58_0 = input.LA(1);
			if ( (LA58_0==KW_IF) ) {
				alt58=1;
			}
			switch (alt58) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1110:24: ifExists
					{
					pushFollow(FOLLOW_ifExists_in_dropTableStatement4103);
					ifExists185=ifExists();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_ifExists.add(ifExists185.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_tableName_in_dropTableStatement4106);
			tableName186=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(tableName186.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1110:44: ( KW_PURGE )?
			int alt59=2;
			int LA59_0 = input.LA(1);
			if ( (LA59_0==KW_PURGE) ) {
				alt59=1;
			}
			switch (alt59) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1110:44: KW_PURGE
					{
					KW_PURGE187=(Token)match(input,KW_PURGE,FOLLOW_KW_PURGE_in_dropTableStatement4108); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_PURGE.add(KW_PURGE187);

					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1110:54: ( replicationClause )?
			int alt60=2;
			int LA60_0 = input.LA(1);
			if ( (LA60_0==KW_FOR) ) {
				alt60=1;
			}
			switch (alt60) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1110:54: replicationClause
					{
					pushFollow(FOLLOW_replicationClause_in_dropTableStatement4111);
					replicationClause188=replicationClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_replicationClause.add(replicationClause188.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: replicationClause, KW_PURGE, tableName, ifExists
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1111:5: -> ^( TOK_DROPTABLE tableName ( ifExists )? ( KW_PURGE )? ( replicationClause )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1111:8: ^( TOK_DROPTABLE tableName ( ifExists )? ( KW_PURGE )? ( replicationClause )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DROPTABLE, "TOK_DROPTABLE"), root_1);
				adaptor.addChild(root_1, stream_tableName.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1111:34: ( ifExists )?
				if ( stream_ifExists.hasNext() ) {
					adaptor.addChild(root_1, stream_ifExists.nextTree());
				}
				stream_ifExists.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1111:44: ( KW_PURGE )?
				if ( stream_KW_PURGE.hasNext() ) {
					adaptor.addChild(root_1, stream_KW_PURGE.nextNode());
				}
				stream_KW_PURGE.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1111:54: ( replicationClause )?
				if ( stream_replicationClause.hasNext() ) {
					adaptor.addChild(root_1, stream_replicationClause.nextTree());
				}
				stream_replicationClause.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "dropTableStatement"


	public static class alterStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1114:1: alterStatement : ( KW_ALTER KW_TABLE tableName alterTableStatementSuffix -> ^( TOK_ALTERTABLE tableName alterTableStatementSuffix ) | KW_ALTER KW_VIEW tableName ( KW_AS )? alterViewStatementSuffix -> ^( TOK_ALTERVIEW tableName alterViewStatementSuffix ) | KW_ALTER KW_MATERIALIZED KW_VIEW tableName alterMaterializedViewStatementSuffix -> ^( TOK_ALTER_MATERIALIZED_VIEW tableName alterMaterializedViewStatementSuffix ) | KW_ALTER ( KW_DATABASE | KW_SCHEMA ) alterDatabaseStatementSuffix -> alterDatabaseStatementSuffix );
	public final HiveParser.alterStatement_return alterStatement() throws RecognitionException {
		HiveParser.alterStatement_return retval = new HiveParser.alterStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ALTER189=null;
		Token KW_TABLE190=null;
		Token KW_ALTER193=null;
		Token KW_VIEW194=null;
		Token KW_AS196=null;
		Token KW_ALTER198=null;
		Token KW_MATERIALIZED199=null;
		Token KW_VIEW200=null;
		Token KW_ALTER203=null;
		Token KW_DATABASE204=null;
		Token KW_SCHEMA205=null;
		ParserRuleReturnScope tableName191 =null;
		ParserRuleReturnScope alterTableStatementSuffix192 =null;
		ParserRuleReturnScope tableName195 =null;
		ParserRuleReturnScope alterViewStatementSuffix197 =null;
		ParserRuleReturnScope tableName201 =null;
		ParserRuleReturnScope alterMaterializedViewStatementSuffix202 =null;
		ParserRuleReturnScope alterDatabaseStatementSuffix206 =null;

		ASTNode KW_ALTER189_tree=null;
		ASTNode KW_TABLE190_tree=null;
		ASTNode KW_ALTER193_tree=null;
		ASTNode KW_VIEW194_tree=null;
		ASTNode KW_AS196_tree=null;
		ASTNode KW_ALTER198_tree=null;
		ASTNode KW_MATERIALIZED199_tree=null;
		ASTNode KW_VIEW200_tree=null;
		ASTNode KW_ALTER203_tree=null;
		ASTNode KW_DATABASE204_tree=null;
		ASTNode KW_SCHEMA205_tree=null;
		RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
		RewriteRuleTokenStream stream_KW_VIEW=new RewriteRuleTokenStream(adaptor,"token KW_VIEW");
		RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
		RewriteRuleTokenStream stream_KW_ALTER=new RewriteRuleTokenStream(adaptor,"token KW_ALTER");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
		RewriteRuleTokenStream stream_KW_MATERIALIZED=new RewriteRuleTokenStream(adaptor,"token KW_MATERIALIZED");
		RewriteRuleSubtreeStream stream_alterMaterializedViewStatementSuffix=new RewriteRuleSubtreeStream(adaptor,"rule alterMaterializedViewStatementSuffix");
		RewriteRuleSubtreeStream stream_alterTableStatementSuffix=new RewriteRuleSubtreeStream(adaptor,"rule alterTableStatementSuffix");
		RewriteRuleSubtreeStream stream_alterViewStatementSuffix=new RewriteRuleSubtreeStream(adaptor,"rule alterViewStatementSuffix");
		RewriteRuleSubtreeStream stream_alterDatabaseStatementSuffix=new RewriteRuleSubtreeStream(adaptor,"rule alterDatabaseStatementSuffix");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		 pushMsg("alter statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1117:5: ( KW_ALTER KW_TABLE tableName alterTableStatementSuffix -> ^( TOK_ALTERTABLE tableName alterTableStatementSuffix ) | KW_ALTER KW_VIEW tableName ( KW_AS )? alterViewStatementSuffix -> ^( TOK_ALTERVIEW tableName alterViewStatementSuffix ) | KW_ALTER KW_MATERIALIZED KW_VIEW tableName alterMaterializedViewStatementSuffix -> ^( TOK_ALTER_MATERIALIZED_VIEW tableName alterMaterializedViewStatementSuffix ) | KW_ALTER ( KW_DATABASE | KW_SCHEMA ) alterDatabaseStatementSuffix -> alterDatabaseStatementSuffix )
			int alt63=4;
			int LA63_0 = input.LA(1);
			if ( (LA63_0==KW_ALTER) ) {
				switch ( input.LA(2) ) {
				case KW_TABLE:
					{
					alt63=1;
					}
					break;
				case KW_VIEW:
					{
					alt63=2;
					}
					break;
				case KW_MATERIALIZED:
					{
					alt63=3;
					}
					break;
				case KW_DATABASE:
				case KW_SCHEMA:
					{
					alt63=4;
					}
					break;
				default:
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 63, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 63, 0, input);
				throw nvae;
			}

			switch (alt63) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1117:7: KW_ALTER KW_TABLE tableName alterTableStatementSuffix
					{
					KW_ALTER189=(Token)match(input,KW_ALTER,FOLLOW_KW_ALTER_in_alterStatement4160); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ALTER.add(KW_ALTER189);

					KW_TABLE190=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_alterStatement4162); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE190);

					pushFollow(FOLLOW_tableName_in_alterStatement4164);
					tableName191=tableName();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableName.add(tableName191.getTree());
					pushFollow(FOLLOW_alterTableStatementSuffix_in_alterStatement4166);
					alterTableStatementSuffix192=alterTableStatementSuffix();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_alterTableStatementSuffix.add(alterTableStatementSuffix192.getTree());
					// AST REWRITE
					// elements: alterTableStatementSuffix, tableName
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1117:61: -> ^( TOK_ALTERTABLE tableName alterTableStatementSuffix )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1117:64: ^( TOK_ALTERTABLE tableName alterTableStatementSuffix )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE, "TOK_ALTERTABLE"), root_1);
						adaptor.addChild(root_1, stream_tableName.nextTree());
						adaptor.addChild(root_1, stream_alterTableStatementSuffix.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1118:7: KW_ALTER KW_VIEW tableName ( KW_AS )? alterViewStatementSuffix
					{
					KW_ALTER193=(Token)match(input,KW_ALTER,FOLLOW_KW_ALTER_in_alterStatement4184); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ALTER.add(KW_ALTER193);

					KW_VIEW194=(Token)match(input,KW_VIEW,FOLLOW_KW_VIEW_in_alterStatement4186); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_VIEW.add(KW_VIEW194);

					pushFollow(FOLLOW_tableName_in_alterStatement4188);
					tableName195=tableName();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableName.add(tableName195.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1118:34: ( KW_AS )?
					int alt61=2;
					int LA61_0 = input.LA(1);
					if ( (LA61_0==KW_AS) ) {
						alt61=1;
					}
					switch (alt61) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1118:34: KW_AS
							{
							KW_AS196=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_alterStatement4190); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_AS.add(KW_AS196);

							}
							break;

					}

					pushFollow(FOLLOW_alterViewStatementSuffix_in_alterStatement4193);
					alterViewStatementSuffix197=alterViewStatementSuffix();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_alterViewStatementSuffix.add(alterViewStatementSuffix197.getTree());
					// AST REWRITE
					// elements: alterViewStatementSuffix, tableName
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1118:66: -> ^( TOK_ALTERVIEW tableName alterViewStatementSuffix )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1118:69: ^( TOK_ALTERVIEW tableName alterViewStatementSuffix )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERVIEW, "TOK_ALTERVIEW"), root_1);
						adaptor.addChild(root_1, stream_tableName.nextTree());
						adaptor.addChild(root_1, stream_alterViewStatementSuffix.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1119:7: KW_ALTER KW_MATERIALIZED KW_VIEW tableName alterMaterializedViewStatementSuffix
					{
					KW_ALTER198=(Token)match(input,KW_ALTER,FOLLOW_KW_ALTER_in_alterStatement4211); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ALTER.add(KW_ALTER198);

					KW_MATERIALIZED199=(Token)match(input,KW_MATERIALIZED,FOLLOW_KW_MATERIALIZED_in_alterStatement4213); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_MATERIALIZED.add(KW_MATERIALIZED199);

					KW_VIEW200=(Token)match(input,KW_VIEW,FOLLOW_KW_VIEW_in_alterStatement4215); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_VIEW.add(KW_VIEW200);

					pushFollow(FOLLOW_tableName_in_alterStatement4217);
					tableName201=tableName();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableName.add(tableName201.getTree());
					pushFollow(FOLLOW_alterMaterializedViewStatementSuffix_in_alterStatement4219);
					alterMaterializedViewStatementSuffix202=alterMaterializedViewStatementSuffix();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_alterMaterializedViewStatementSuffix.add(alterMaterializedViewStatementSuffix202.getTree());
					// AST REWRITE
					// elements: tableName, alterMaterializedViewStatementSuffix
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1120:5: -> ^( TOK_ALTER_MATERIALIZED_VIEW tableName alterMaterializedViewStatementSuffix )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1120:8: ^( TOK_ALTER_MATERIALIZED_VIEW tableName alterMaterializedViewStatementSuffix )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTER_MATERIALIZED_VIEW, "TOK_ALTER_MATERIALIZED_VIEW"), root_1);
						adaptor.addChild(root_1, stream_tableName.nextTree());
						adaptor.addChild(root_1, stream_alterMaterializedViewStatementSuffix.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1121:7: KW_ALTER ( KW_DATABASE | KW_SCHEMA ) alterDatabaseStatementSuffix
					{
					KW_ALTER203=(Token)match(input,KW_ALTER,FOLLOW_KW_ALTER_in_alterStatement4241); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ALTER.add(KW_ALTER203);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1121:16: ( KW_DATABASE | KW_SCHEMA )
					int alt62=2;
					int LA62_0 = input.LA(1);
					if ( (LA62_0==KW_DATABASE) ) {
						alt62=1;
					}
					else if ( (LA62_0==KW_SCHEMA) ) {
						alt62=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						NoViableAltException nvae =
							new NoViableAltException("", 62, 0, input);
						throw nvae;
					}

					switch (alt62) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1121:17: KW_DATABASE
							{
							KW_DATABASE204=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_alterStatement4244); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_DATABASE.add(KW_DATABASE204);

							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1121:29: KW_SCHEMA
							{
							KW_SCHEMA205=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_alterStatement4246); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_SCHEMA.add(KW_SCHEMA205);

							}
							break;

					}

					pushFollow(FOLLOW_alterDatabaseStatementSuffix_in_alterStatement4249);
					alterDatabaseStatementSuffix206=alterDatabaseStatementSuffix();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_alterDatabaseStatementSuffix.add(alterDatabaseStatementSuffix206.getTree());
					// AST REWRITE
					// elements: alterDatabaseStatementSuffix
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1121:69: -> alterDatabaseStatementSuffix
					{
						adaptor.addChild(root_0, stream_alterDatabaseStatementSuffix.nextTree());
					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterStatement"


	public static class alterTableStatementSuffix_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterTableStatementSuffix"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1124:1: alterTableStatementSuffix : ( ( alterStatementSuffixRename[true] )=> alterStatementSuffixRename[true] | alterStatementSuffixDropPartitions[true] | alterStatementSuffixAddPartitions[true] | alterStatementSuffixTouch | alterStatementSuffixArchive | alterStatementSuffixUnArchive | alterStatementSuffixProperties | alterStatementSuffixSkewedby | alterStatementSuffixExchangePartition | alterStatementPartitionKeyType | alterStatementSuffixDropConstraint | alterStatementSuffixAddConstraint | ( partitionSpec )? alterTblPartitionStatementSuffix -> alterTblPartitionStatementSuffix ( partitionSpec )? | alterStatementSuffixSetOwner );
	public final HiveParser.alterTableStatementSuffix_return alterTableStatementSuffix() throws RecognitionException {
		HiveParser.alterTableStatementSuffix_return retval = new HiveParser.alterTableStatementSuffix_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope alterStatementSuffixRename207 =null;
		ParserRuleReturnScope alterStatementSuffixDropPartitions208 =null;
		ParserRuleReturnScope alterStatementSuffixAddPartitions209 =null;
		ParserRuleReturnScope alterStatementSuffixTouch210 =null;
		ParserRuleReturnScope alterStatementSuffixArchive211 =null;
		ParserRuleReturnScope alterStatementSuffixUnArchive212 =null;
		ParserRuleReturnScope alterStatementSuffixProperties213 =null;
		ParserRuleReturnScope alterStatementSuffixSkewedby214 =null;
		ParserRuleReturnScope alterStatementSuffixExchangePartition215 =null;
		ParserRuleReturnScope alterStatementPartitionKeyType216 =null;
		ParserRuleReturnScope alterStatementSuffixDropConstraint217 =null;
		ParserRuleReturnScope alterStatementSuffixAddConstraint218 =null;
		ParserRuleReturnScope partitionSpec219 =null;
		ParserRuleReturnScope alterTblPartitionStatementSuffix220 =null;
		ParserRuleReturnScope alterStatementSuffixSetOwner221 =null;

		RewriteRuleSubtreeStream stream_alterTblPartitionStatementSuffix=new RewriteRuleSubtreeStream(adaptor,"rule alterTblPartitionStatementSuffix");
		RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");

		 pushMsg("alter table statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1127:5: ( ( alterStatementSuffixRename[true] )=> alterStatementSuffixRename[true] | alterStatementSuffixDropPartitions[true] | alterStatementSuffixAddPartitions[true] | alterStatementSuffixTouch | alterStatementSuffixArchive | alterStatementSuffixUnArchive | alterStatementSuffixProperties | alterStatementSuffixSkewedby | alterStatementSuffixExchangePartition | alterStatementPartitionKeyType | alterStatementSuffixDropConstraint | alterStatementSuffixAddConstraint | ( partitionSpec )? alterTblPartitionStatementSuffix -> alterTblPartitionStatementSuffix ( partitionSpec )? | alterStatementSuffixSetOwner )
			int alt65=14;
			switch ( input.LA(1) ) {
			case KW_RENAME:
				{
				int LA65_1 = input.LA(2);
				if ( (LA65_1==KW_TO) ) {
					int LA65_20 = input.LA(3);
					if ( (LA65_20==Identifier) && (synpred3_HiveParser())) {
						alt65=1;
					}
					else if ( ((LA65_20 >= KW_ABORT && LA65_20 <= KW_AFTER)||LA65_20==KW_ALLOC_FRACTION||LA65_20==KW_ANALYZE||LA65_20==KW_ARCHIVE||LA65_20==KW_ASC||(LA65_20 >= KW_AUTOCOMMIT && LA65_20 <= KW_BEFORE)||(LA65_20 >= KW_BUCKET && LA65_20 <= KW_BUCKETS)||(LA65_20 >= KW_CACHE && LA65_20 <= KW_CASCADE)||LA65_20==KW_CHANGE||(LA65_20 >= KW_CHECK && LA65_20 <= KW_COLLECTION)||(LA65_20 >= KW_COLUMNS && LA65_20 <= KW_COMMENT)||(LA65_20 >= KW_COMPACT && LA65_20 <= KW_CONCATENATE)||LA65_20==KW_CONTINUE||LA65_20==KW_DATA||LA65_20==KW_DATABASES||(LA65_20 >= KW_DATETIME && LA65_20 <= KW_DBPROPERTIES)||(LA65_20 >= KW_DEFAULT && LA65_20 <= KW_DEFINED)||(LA65_20 >= KW_DELIMITED && LA65_20 <= KW_DESC)||(LA65_20 >= KW_DETAIL && LA65_20 <= KW_DISABLE)||(LA65_20 >= KW_DISTRIBUTE && LA65_20 <= KW_DO)||LA65_20==KW_DOW||(LA65_20 >= KW_DUMP && LA65_20 <= KW_ELEM_TYPE)||LA65_20==KW_ENABLE||(LA65_20 >= KW_ENFORCED && LA65_20 <= KW_ESCAPED)||LA65_20==KW_EXCLUSIVE||(LA65_20 >= KW_EXPLAIN && LA65_20 <= KW_EXPRESSION)||(LA65_20 >= KW_FIELDS && LA65_20 <= KW_FIRST)||(LA65_20 >= KW_FORMAT && LA65_20 <= KW_FORMATTED)||LA65_20==KW_FUNCTIONS||(LA65_20 >= KW_HOUR && LA65_20 <= KW_IDXPROPERTIES)||(LA65_20 >= KW_INDEX && LA65_20 <= KW_INDEXES)||(LA65_20 >= KW_INPATH && LA65_20 <= KW_INPUTFORMAT)||(LA65_20 >= KW_ISOLATION && LA65_20 <= KW_JAR)||(LA65_20 >= KW_KEY && LA65_20 <= KW_LAST)||LA65_20==KW_LEVEL||(LA65_20 >= KW_LIMIT && LA65_20 <= KW_LOAD)||(LA65_20 >= KW_LOCATION && LA65_20 <= KW_LONG)||LA65_20==KW_MANAGEMENT||(LA65_20 >= KW_MAPJOIN && LA65_20 <= KW_MATERIALIZED)||LA65_20==KW_METADATA||(LA65_20 >= KW_MINUTE && LA65_20 <= KW_MONTH)||(LA65_20 >= KW_MOVE && LA65_20 <= KW_MSCK)||(LA65_20 >= KW_NORELY && LA65_20 <= KW_NOSCAN)||LA65_20==KW_NOVALIDATE||LA65_20==KW_NULLS||LA65_20==KW_OFFSET||(LA65_20 >= KW_OPERATOR && LA65_20 <= KW_OPTION)||(LA65_20 >= KW_OUTPUTDRIVER && LA65_20 <= KW_OUTPUTFORMAT)||(LA65_20 >= KW_OVERWRITE && LA65_20 <= KW_OWNER)||(LA65_20 >= KW_PARTITIONED && LA65_20 <= KW_PATH)||(LA65_20 >= KW_PLAN && LA65_20 <= KW_POOL)||LA65_20==KW_PRINCIPALS||(LA65_20 >= KW_PURGE && LA65_20 <= KW_QUERY_PARALLELISM)||LA65_20==KW_READ||(LA65_20 >= KW_REBUILD && LA65_20 <= KW_RECORDWRITER)||(LA65_20 >= KW_RELOAD && LA65_20 <= KW_RESTRICT)||LA65_20==KW_REWRITE||(LA65_20 >= KW_ROLE && LA65_20 <= KW_ROLES)||(LA65_20 >= KW_SCHEDULING_POLICY && LA65_20 <= KW_SECOND)||(LA65_20 >= KW_SEMI && LA65_20 <= KW_SERVER)||(LA65_20 >= KW_SETS && LA65_20 <= KW_SKEWED)||(LA65_20 >= KW_SNAPSHOT && LA65_20 <= KW_SSL)||(LA65_20 >= KW_STATISTICS && LA65_20 <= KW_SUMMARY)||LA65_20==KW_TABLES||(LA65_20 >= KW_TBLPROPERTIES && LA65_20 <= KW_TERMINATED)||LA65_20==KW_TINYINT||(LA65_20 >= KW_TOUCH && LA65_20 <= KW_TRANSACTIONS)||LA65_20==KW_UNARCHIVE||LA65_20==KW_UNDO||LA65_20==KW_UNIONTYPE||(LA65_20 >= KW_UNLOCK && LA65_20 <= KW_UNSIGNED)||(LA65_20 >= KW_URI && LA65_20 <= KW_USE)||(LA65_20 >= KW_UTC && LA65_20 <= KW_VALIDATE)||LA65_20==KW_VALUE_TYPE||(LA65_20 >= KW_VECTORIZATION && LA65_20 <= KW_WEEK)||LA65_20==KW_WHILE||(LA65_20 >= KW_WORK && LA65_20 <= KW_ZONE)||LA65_20==KW_BATCH||LA65_20==KW_DAYOFWEEK||LA65_20==KW_HOLD_DDLTIME||LA65_20==KW_IGNORE||LA65_20==KW_NO_DROP||LA65_20==KW_OFFLINE||LA65_20==KW_PROTECTION||LA65_20==KW_READONLY||LA65_20==KW_TIMESTAMPTZ) && (synpred3_HiveParser())) {
						alt65=1;
					}
					else if ( (LA65_20==KW_PARTITION) ) {
						alt65=13;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						int nvaeMark = input.mark();
						try {
							for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
								input.consume();
							}
							NoViableAltException nvae =
								new NoViableAltException("", 65, 20, input);
							throw nvae;
						} finally {
							input.rewind(nvaeMark);
						}
					}

				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 65, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_DROP:
				{
				int LA65_2 = input.LA(2);
				if ( (LA65_2==KW_CONSTRAINT) ) {
					alt65=11;
				}
				else if ( (LA65_2==KW_IF||LA65_2==KW_PARTITION) ) {
					alt65=2;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 65, 2, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_ADD:
				{
				switch ( input.LA(2) ) {
				case KW_IF:
				case KW_PARTITION:
					{
					alt65=3;
					}
					break;
				case KW_CONSTRAINT:
					{
					alt65=12;
					}
					break;
				case KW_COLUMNS:
					{
					alt65=13;
					}
					break;
				default:
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 65, 3, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}
				}
				break;
			case KW_TOUCH:
				{
				alt65=4;
				}
				break;
			case KW_ARCHIVE:
				{
				alt65=5;
				}
				break;
			case KW_UNARCHIVE:
				{
				alt65=6;
				}
				break;
			case KW_SET:
				{
				switch ( input.LA(2) ) {
				case KW_TBLPROPERTIES:
					{
					alt65=7;
					}
					break;
				case KW_FILEFORMAT:
				case KW_LOCATION:
				case KW_SERDE:
				case KW_SERDEPROPERTIES:
				case KW_SKEWED:
					{
					alt65=13;
					}
					break;
				case KW_OWNER:
					{
					alt65=14;
					}
					break;
				default:
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 65, 7, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}
				}
				break;
			case KW_UNSET:
				{
				alt65=7;
				}
				break;
			case KW_SKEWED:
				{
				alt65=8;
				}
				break;
			case KW_NOT:
				{
				int LA65_10 = input.LA(2);
				if ( (LA65_10==KW_SKEWED||LA65_10==KW_STORED) ) {
					alt65=8;
				}
				else if ( (LA65_10==KW_CLUSTERED||LA65_10==KW_SORTED) ) {
					alt65=13;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 65, 10, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_EXCHANGE:
				{
				alt65=9;
				}
				break;
			case KW_PARTITION:
				{
				int LA65_12 = input.LA(2);
				if ( (LA65_12==KW_COLUMN) ) {
					alt65=10;
				}
				else if ( (LA65_12==LPAREN) ) {
					alt65=13;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 65, 12, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_CHANGE:
			case KW_CLUSTERED:
			case KW_COMPACT:
			case KW_CONCATENATE:
			case KW_INTO:
			case KW_REPLACE:
			case KW_UPDATE:
				{
				alt65=13;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 65, 0, input);
				throw nvae;
			}
			switch (alt65) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1127:7: ( alterStatementSuffixRename[true] )=> alterStatementSuffixRename[true]
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixRename_in_alterTableStatementSuffix4287);
					alterStatementSuffixRename207=alterStatementSuffixRename(true);
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixRename207.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1128:7: alterStatementSuffixDropPartitions[true]
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixDropPartitions_in_alterTableStatementSuffix4296);
					alterStatementSuffixDropPartitions208=alterStatementSuffixDropPartitions(true);
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixDropPartitions208.getTree());

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1129:7: alterStatementSuffixAddPartitions[true]
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixAddPartitions_in_alterTableStatementSuffix4305);
					alterStatementSuffixAddPartitions209=alterStatementSuffixAddPartitions(true);
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixAddPartitions209.getTree());

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1130:7: alterStatementSuffixTouch
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixTouch_in_alterTableStatementSuffix4314);
					alterStatementSuffixTouch210=alterStatementSuffixTouch();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixTouch210.getTree());

					}
					break;
				case 5 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1131:7: alterStatementSuffixArchive
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixArchive_in_alterTableStatementSuffix4322);
					alterStatementSuffixArchive211=alterStatementSuffixArchive();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixArchive211.getTree());

					}
					break;
				case 6 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1132:7: alterStatementSuffixUnArchive
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixUnArchive_in_alterTableStatementSuffix4330);
					alterStatementSuffixUnArchive212=alterStatementSuffixUnArchive();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixUnArchive212.getTree());

					}
					break;
				case 7 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1133:7: alterStatementSuffixProperties
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixProperties_in_alterTableStatementSuffix4338);
					alterStatementSuffixProperties213=alterStatementSuffixProperties();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixProperties213.getTree());

					}
					break;
				case 8 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1134:7: alterStatementSuffixSkewedby
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixSkewedby_in_alterTableStatementSuffix4346);
					alterStatementSuffixSkewedby214=alterStatementSuffixSkewedby();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixSkewedby214.getTree());

					}
					break;
				case 9 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1135:7: alterStatementSuffixExchangePartition
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixExchangePartition_in_alterTableStatementSuffix4354);
					alterStatementSuffixExchangePartition215=alterStatementSuffixExchangePartition();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixExchangePartition215.getTree());

					}
					break;
				case 10 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1136:7: alterStatementPartitionKeyType
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementPartitionKeyType_in_alterTableStatementSuffix4362);
					alterStatementPartitionKeyType216=alterStatementPartitionKeyType();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementPartitionKeyType216.getTree());

					}
					break;
				case 11 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1137:7: alterStatementSuffixDropConstraint
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixDropConstraint_in_alterTableStatementSuffix4370);
					alterStatementSuffixDropConstraint217=alterStatementSuffixDropConstraint();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixDropConstraint217.getTree());

					}
					break;
				case 12 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1138:7: alterStatementSuffixAddConstraint
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixAddConstraint_in_alterTableStatementSuffix4378);
					alterStatementSuffixAddConstraint218=alterStatementSuffixAddConstraint();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixAddConstraint218.getTree());

					}
					break;
				case 13 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1139:7: ( partitionSpec )? alterTblPartitionStatementSuffix
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1139:7: ( partitionSpec )?
					int alt64=2;
					int LA64_0 = input.LA(1);
					if ( (LA64_0==KW_PARTITION) ) {
						alt64=1;
					}
					switch (alt64) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1139:7: partitionSpec
							{
							pushFollow(FOLLOW_partitionSpec_in_alterTableStatementSuffix4386);
							partitionSpec219=partitionSpec();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_partitionSpec.add(partitionSpec219.getTree());
							}
							break;

					}

					pushFollow(FOLLOW_alterTblPartitionStatementSuffix_in_alterTableStatementSuffix4389);
					alterTblPartitionStatementSuffix220=alterTblPartitionStatementSuffix();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_alterTblPartitionStatementSuffix.add(alterTblPartitionStatementSuffix220.getTree());
					// AST REWRITE
					// elements: alterTblPartitionStatementSuffix, partitionSpec
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1139:55: -> alterTblPartitionStatementSuffix ( partitionSpec )?
					{
						adaptor.addChild(root_0, stream_alterTblPartitionStatementSuffix.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1139:91: ( partitionSpec )?
						if ( stream_partitionSpec.hasNext() ) {
							adaptor.addChild(root_0, stream_partitionSpec.nextTree());
						}
						stream_partitionSpec.reset();

					}


					retval.tree = root_0;
					}

					}
					break;
				case 14 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1140:7: alterStatementSuffixSetOwner
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixSetOwner_in_alterTableStatementSuffix4404);
					alterStatementSuffixSetOwner221=alterStatementSuffixSetOwner();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixSetOwner221.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterTableStatementSuffix"


	public static class alterTblPartitionStatementSuffix_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterTblPartitionStatementSuffix"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1143:1: alterTblPartitionStatementSuffix : ( alterStatementSuffixFileFormat | alterStatementSuffixLocation | alterStatementSuffixMergeFiles | alterStatementSuffixSerdeProperties | alterStatementSuffixRenamePart | alterStatementSuffixBucketNum | alterTblPartitionStatementSuffixSkewedLocation | alterStatementSuffixClusterbySortby | alterStatementSuffixCompact | alterStatementSuffixUpdateStatsCol | alterStatementSuffixUpdateStats | alterStatementSuffixRenameCol | alterStatementSuffixAddCol | alterStatementSuffixUpdateColumns );
	public final HiveParser.alterTblPartitionStatementSuffix_return alterTblPartitionStatementSuffix() throws RecognitionException {
		HiveParser.alterTblPartitionStatementSuffix_return retval = new HiveParser.alterTblPartitionStatementSuffix_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope alterStatementSuffixFileFormat222 =null;
		ParserRuleReturnScope alterStatementSuffixLocation223 =null;
		ParserRuleReturnScope alterStatementSuffixMergeFiles224 =null;
		ParserRuleReturnScope alterStatementSuffixSerdeProperties225 =null;
		ParserRuleReturnScope alterStatementSuffixRenamePart226 =null;
		ParserRuleReturnScope alterStatementSuffixBucketNum227 =null;
		ParserRuleReturnScope alterTblPartitionStatementSuffixSkewedLocation228 =null;
		ParserRuleReturnScope alterStatementSuffixClusterbySortby229 =null;
		ParserRuleReturnScope alterStatementSuffixCompact230 =null;
		ParserRuleReturnScope alterStatementSuffixUpdateStatsCol231 =null;
		ParserRuleReturnScope alterStatementSuffixUpdateStats232 =null;
		ParserRuleReturnScope alterStatementSuffixRenameCol233 =null;
		ParserRuleReturnScope alterStatementSuffixAddCol234 =null;
		ParserRuleReturnScope alterStatementSuffixUpdateColumns235 =null;


		pushMsg("alter table partition statement suffix", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1146:3: ( alterStatementSuffixFileFormat | alterStatementSuffixLocation | alterStatementSuffixMergeFiles | alterStatementSuffixSerdeProperties | alterStatementSuffixRenamePart | alterStatementSuffixBucketNum | alterTblPartitionStatementSuffixSkewedLocation | alterStatementSuffixClusterbySortby | alterStatementSuffixCompact | alterStatementSuffixUpdateStatsCol | alterStatementSuffixUpdateStats | alterStatementSuffixRenameCol | alterStatementSuffixAddCol | alterStatementSuffixUpdateColumns )
			int alt66=14;
			switch ( input.LA(1) ) {
			case KW_SET:
				{
				switch ( input.LA(2) ) {
				case KW_FILEFORMAT:
					{
					alt66=1;
					}
					break;
				case KW_LOCATION:
					{
					alt66=2;
					}
					break;
				case KW_SERDE:
				case KW_SERDEPROPERTIES:
					{
					alt66=4;
					}
					break;
				case KW_SKEWED:
					{
					alt66=7;
					}
					break;
				default:
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 66, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}
				}
				break;
			case KW_CONCATENATE:
				{
				alt66=3;
				}
				break;
			case KW_RENAME:
				{
				alt66=5;
				}
				break;
			case KW_INTO:
				{
				alt66=6;
				}
				break;
			case KW_CLUSTERED:
			case KW_NOT:
				{
				alt66=8;
				}
				break;
			case KW_COMPACT:
				{
				alt66=9;
				}
				break;
			case KW_UPDATE:
				{
				int LA66_8 = input.LA(2);
				if ( (LA66_8==KW_STATISTICS) ) {
					int LA66_17 = input.LA(3);
					if ( (LA66_17==KW_FOR) ) {
						alt66=10;
					}
					else if ( (LA66_17==KW_SET) ) {
						alt66=11;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						int nvaeMark = input.mark();
						try {
							for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
								input.consume();
							}
							NoViableAltException nvae =
								new NoViableAltException("", 66, 17, input);
							throw nvae;
						} finally {
							input.rewind(nvaeMark);
						}
					}

				}
				else if ( (LA66_8==KW_COLUMNS) ) {
					alt66=14;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 66, 8, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_CHANGE:
				{
				alt66=12;
				}
				break;
			case KW_ADD:
			case KW_REPLACE:
				{
				alt66=13;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 66, 0, input);
				throw nvae;
			}
			switch (alt66) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1146:5: alterStatementSuffixFileFormat
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixFileFormat_in_alterTblPartitionStatementSuffix4429);
					alterStatementSuffixFileFormat222=alterStatementSuffixFileFormat();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixFileFormat222.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1147:5: alterStatementSuffixLocation
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixLocation_in_alterTblPartitionStatementSuffix4435);
					alterStatementSuffixLocation223=alterStatementSuffixLocation();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixLocation223.getTree());

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1148:5: alterStatementSuffixMergeFiles
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixMergeFiles_in_alterTblPartitionStatementSuffix4441);
					alterStatementSuffixMergeFiles224=alterStatementSuffixMergeFiles();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixMergeFiles224.getTree());

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1149:5: alterStatementSuffixSerdeProperties
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixSerdeProperties_in_alterTblPartitionStatementSuffix4447);
					alterStatementSuffixSerdeProperties225=alterStatementSuffixSerdeProperties();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixSerdeProperties225.getTree());

					}
					break;
				case 5 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1150:5: alterStatementSuffixRenamePart
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixRenamePart_in_alterTblPartitionStatementSuffix4453);
					alterStatementSuffixRenamePart226=alterStatementSuffixRenamePart();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixRenamePart226.getTree());

					}
					break;
				case 6 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1151:5: alterStatementSuffixBucketNum
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixBucketNum_in_alterTblPartitionStatementSuffix4459);
					alterStatementSuffixBucketNum227=alterStatementSuffixBucketNum();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixBucketNum227.getTree());

					}
					break;
				case 7 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1152:5: alterTblPartitionStatementSuffixSkewedLocation
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterTblPartitionStatementSuffixSkewedLocation_in_alterTblPartitionStatementSuffix4465);
					alterTblPartitionStatementSuffixSkewedLocation228=alterTblPartitionStatementSuffixSkewedLocation();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterTblPartitionStatementSuffixSkewedLocation228.getTree());

					}
					break;
				case 8 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1153:5: alterStatementSuffixClusterbySortby
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixClusterbySortby_in_alterTblPartitionStatementSuffix4471);
					alterStatementSuffixClusterbySortby229=alterStatementSuffixClusterbySortby();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixClusterbySortby229.getTree());

					}
					break;
				case 9 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1154:5: alterStatementSuffixCompact
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixCompact_in_alterTblPartitionStatementSuffix4477);
					alterStatementSuffixCompact230=alterStatementSuffixCompact();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixCompact230.getTree());

					}
					break;
				case 10 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1155:5: alterStatementSuffixUpdateStatsCol
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixUpdateStatsCol_in_alterTblPartitionStatementSuffix4483);
					alterStatementSuffixUpdateStatsCol231=alterStatementSuffixUpdateStatsCol();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixUpdateStatsCol231.getTree());

					}
					break;
				case 11 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1156:5: alterStatementSuffixUpdateStats
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixUpdateStats_in_alterTblPartitionStatementSuffix4489);
					alterStatementSuffixUpdateStats232=alterStatementSuffixUpdateStats();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixUpdateStats232.getTree());

					}
					break;
				case 12 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1157:5: alterStatementSuffixRenameCol
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixRenameCol_in_alterTblPartitionStatementSuffix4495);
					alterStatementSuffixRenameCol233=alterStatementSuffixRenameCol();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixRenameCol233.getTree());

					}
					break;
				case 13 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1158:5: alterStatementSuffixAddCol
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixAddCol_in_alterTblPartitionStatementSuffix4501);
					alterStatementSuffixAddCol234=alterStatementSuffixAddCol();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixAddCol234.getTree());

					}
					break;
				case 14 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1159:5: alterStatementSuffixUpdateColumns
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixUpdateColumns_in_alterTblPartitionStatementSuffix4507);
					alterStatementSuffixUpdateColumns235=alterStatementSuffixUpdateColumns();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixUpdateColumns235.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterTblPartitionStatementSuffix"


	public static class alterStatementPartitionKeyType_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementPartitionKeyType"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1162:1: alterStatementPartitionKeyType : KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN -> ^( TOK_ALTERTABLE_PARTCOLTYPE columnNameType ) ;
	public final HiveParser.alterStatementPartitionKeyType_return alterStatementPartitionKeyType() throws RecognitionException {
		HiveParser.alterStatementPartitionKeyType_return retval = new HiveParser.alterStatementPartitionKeyType_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_PARTITION236=null;
		Token KW_COLUMN237=null;
		Token LPAREN238=null;
		Token RPAREN240=null;
		ParserRuleReturnScope columnNameType239 =null;

		ASTNode KW_PARTITION236_tree=null;
		ASTNode KW_COLUMN237_tree=null;
		ASTNode LPAREN238_tree=null;
		ASTNode RPAREN240_tree=null;
		RewriteRuleTokenStream stream_KW_PARTITION=new RewriteRuleTokenStream(adaptor,"token KW_PARTITION");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleTokenStream stream_KW_COLUMN=new RewriteRuleTokenStream(adaptor,"token KW_COLUMN");
		RewriteRuleSubtreeStream stream_columnNameType=new RewriteRuleSubtreeStream(adaptor,"rule columnNameType");

		msgs.push("alter partition key type"); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1165:2: ( KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN -> ^( TOK_ALTERTABLE_PARTCOLTYPE columnNameType ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1165:4: KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN
			{
			KW_PARTITION236=(Token)match(input,KW_PARTITION,FOLLOW_KW_PARTITION_in_alterStatementPartitionKeyType4529); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_PARTITION.add(KW_PARTITION236);

			KW_COLUMN237=(Token)match(input,KW_COLUMN,FOLLOW_KW_COLUMN_in_alterStatementPartitionKeyType4531); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_COLUMN.add(KW_COLUMN237);

			LPAREN238=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_alterStatementPartitionKeyType4533); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN238);

			pushFollow(FOLLOW_columnNameType_in_alterStatementPartitionKeyType4535);
			columnNameType239=columnNameType();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnNameType.add(columnNameType239.getTree());
			RPAREN240=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_alterStatementPartitionKeyType4537); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN240);

			// AST REWRITE
			// elements: columnNameType
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1166:2: -> ^( TOK_ALTERTABLE_PARTCOLTYPE columnNameType )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1166:5: ^( TOK_ALTERTABLE_PARTCOLTYPE columnNameType )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_PARTCOLTYPE, "TOK_ALTERTABLE_PARTCOLTYPE"), root_1);
				adaptor.addChild(root_1, stream_columnNameType.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {msgs.pop();}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterStatementPartitionKeyType"


	public static class alterViewStatementSuffix_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterViewStatementSuffix"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1169:1: alterViewStatementSuffix : ( alterViewSuffixProperties | alterStatementSuffixRename[false] | alterStatementSuffixAddPartitions[false] | alterStatementSuffixDropPartitions[false] | selectStatementWithCTE );
	public final HiveParser.alterViewStatementSuffix_return alterViewStatementSuffix() throws RecognitionException {
		HiveParser.alterViewStatementSuffix_return retval = new HiveParser.alterViewStatementSuffix_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope alterViewSuffixProperties241 =null;
		ParserRuleReturnScope alterStatementSuffixRename242 =null;
		ParserRuleReturnScope alterStatementSuffixAddPartitions243 =null;
		ParserRuleReturnScope alterStatementSuffixDropPartitions244 =null;
		ParserRuleReturnScope selectStatementWithCTE245 =null;


		 pushMsg("alter view statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1172:5: ( alterViewSuffixProperties | alterStatementSuffixRename[false] | alterStatementSuffixAddPartitions[false] | alterStatementSuffixDropPartitions[false] | selectStatementWithCTE )
			int alt67=5;
			switch ( input.LA(1) ) {
			case KW_SET:
			case KW_UNSET:
				{
				alt67=1;
				}
				break;
			case KW_RENAME:
				{
				alt67=2;
				}
				break;
			case KW_ADD:
				{
				alt67=3;
				}
				break;
			case KW_DROP:
				{
				alt67=4;
				}
				break;
			case KW_MAP:
			case KW_REDUCE:
			case KW_SELECT:
			case KW_WITH:
			case LPAREN:
				{
				alt67=5;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 67, 0, input);
				throw nvae;
			}
			switch (alt67) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1172:7: alterViewSuffixProperties
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterViewSuffixProperties_in_alterViewStatementSuffix4570);
					alterViewSuffixProperties241=alterViewSuffixProperties();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterViewSuffixProperties241.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1173:7: alterStatementSuffixRename[false]
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixRename_in_alterViewStatementSuffix4578);
					alterStatementSuffixRename242=alterStatementSuffixRename(false);
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixRename242.getTree());

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1174:7: alterStatementSuffixAddPartitions[false]
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixAddPartitions_in_alterViewStatementSuffix4587);
					alterStatementSuffixAddPartitions243=alterStatementSuffixAddPartitions(false);
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixAddPartitions243.getTree());

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1175:7: alterStatementSuffixDropPartitions[false]
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterStatementSuffixDropPartitions_in_alterViewStatementSuffix4596);
					alterStatementSuffixDropPartitions244=alterStatementSuffixDropPartitions(false);
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterStatementSuffixDropPartitions244.getTree());

					}
					break;
				case 5 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1176:7: selectStatementWithCTE
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_selectStatementWithCTE_in_alterViewStatementSuffix4605);
					selectStatementWithCTE245=selectStatementWithCTE();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, selectStatementWithCTE245.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterViewStatementSuffix"


	public static class alterMaterializedViewStatementSuffix_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterMaterializedViewStatementSuffix"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1179:1: alterMaterializedViewStatementSuffix : ( alterMaterializedViewSuffixRewrite | alterMaterializedViewSuffixRebuild );
	public final HiveParser.alterMaterializedViewStatementSuffix_return alterMaterializedViewStatementSuffix() throws RecognitionException {
		HiveParser.alterMaterializedViewStatementSuffix_return retval = new HiveParser.alterMaterializedViewStatementSuffix_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope alterMaterializedViewSuffixRewrite246 =null;
		ParserRuleReturnScope alterMaterializedViewSuffixRebuild247 =null;


		 pushMsg("alter materialized view statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1182:5: ( alterMaterializedViewSuffixRewrite | alterMaterializedViewSuffixRebuild )
			int alt68=2;
			int LA68_0 = input.LA(1);
			if ( (LA68_0==KW_DISABLE||LA68_0==KW_ENABLE) ) {
				alt68=1;
			}
			else if ( (LA68_0==KW_REBUILD) ) {
				alt68=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 68, 0, input);
				throw nvae;
			}

			switch (alt68) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1182:7: alterMaterializedViewSuffixRewrite
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterMaterializedViewSuffixRewrite_in_alterMaterializedViewStatementSuffix4632);
					alterMaterializedViewSuffixRewrite246=alterMaterializedViewSuffixRewrite();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterMaterializedViewSuffixRewrite246.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1183:7: alterMaterializedViewSuffixRebuild
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterMaterializedViewSuffixRebuild_in_alterMaterializedViewStatementSuffix4640);
					alterMaterializedViewSuffixRebuild247=alterMaterializedViewSuffixRebuild();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterMaterializedViewSuffixRebuild247.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterMaterializedViewStatementSuffix"


	public static class alterDatabaseStatementSuffix_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterDatabaseStatementSuffix"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1186:1: alterDatabaseStatementSuffix : ( alterDatabaseSuffixProperties | alterDatabaseSuffixSetOwner | alterDatabaseSuffixSetLocation );
	public final HiveParser.alterDatabaseStatementSuffix_return alterDatabaseStatementSuffix() throws RecognitionException {
		HiveParser.alterDatabaseStatementSuffix_return retval = new HiveParser.alterDatabaseStatementSuffix_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope alterDatabaseSuffixProperties248 =null;
		ParserRuleReturnScope alterDatabaseSuffixSetOwner249 =null;
		ParserRuleReturnScope alterDatabaseSuffixSetLocation250 =null;


		 pushMsg("alter database statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1189:5: ( alterDatabaseSuffixProperties | alterDatabaseSuffixSetOwner | alterDatabaseSuffixSetLocation )
			int alt69=3;
			int LA69_0 = input.LA(1);
			if ( (LA69_0==Identifier) ) {
				int LA69_1 = input.LA(2);
				if ( (LA69_1==KW_SET) ) {
					switch ( input.LA(3) ) {
					case KW_DBPROPERTIES:
						{
						alt69=1;
						}
						break;
					case KW_OWNER:
						{
						alt69=2;
						}
						break;
					case KW_LOCATION:
						{
						alt69=3;
						}
						break;
					default:
						if (state.backtracking>0) {state.failed=true; return retval;}
						int nvaeMark = input.mark();
						try {
							for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
								input.consume();
							}
							NoViableAltException nvae =
								new NoViableAltException("", 69, 3, input);
							throw nvae;
						} finally {
							input.rewind(nvaeMark);
						}
					}
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 69, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

			}
			else if ( ((LA69_0 >= KW_ABORT && LA69_0 <= KW_AFTER)||LA69_0==KW_ALLOC_FRACTION||LA69_0==KW_ANALYZE||LA69_0==KW_ARCHIVE||LA69_0==KW_ASC||(LA69_0 >= KW_AUTOCOMMIT && LA69_0 <= KW_BEFORE)||(LA69_0 >= KW_BUCKET && LA69_0 <= KW_BUCKETS)||(LA69_0 >= KW_CACHE && LA69_0 <= KW_CASCADE)||LA69_0==KW_CHANGE||(LA69_0 >= KW_CHECK && LA69_0 <= KW_COLLECTION)||(LA69_0 >= KW_COLUMNS && LA69_0 <= KW_COMMENT)||(LA69_0 >= KW_COMPACT && LA69_0 <= KW_CONCATENATE)||LA69_0==KW_CONTINUE||LA69_0==KW_DATA||LA69_0==KW_DATABASES||(LA69_0 >= KW_DATETIME && LA69_0 <= KW_DBPROPERTIES)||(LA69_0 >= KW_DEFAULT && LA69_0 <= KW_DEFINED)||(LA69_0 >= KW_DELIMITED && LA69_0 <= KW_DESC)||(LA69_0 >= KW_DETAIL && LA69_0 <= KW_DISABLE)||(LA69_0 >= KW_DISTRIBUTE && LA69_0 <= KW_DO)||LA69_0==KW_DOW||(LA69_0 >= KW_DUMP && LA69_0 <= KW_ELEM_TYPE)||LA69_0==KW_ENABLE||(LA69_0 >= KW_ENFORCED && LA69_0 <= KW_ESCAPED)||LA69_0==KW_EXCLUSIVE||(LA69_0 >= KW_EXPLAIN && LA69_0 <= KW_EXPRESSION)||(LA69_0 >= KW_FIELDS && LA69_0 <= KW_FIRST)||(LA69_0 >= KW_FORMAT && LA69_0 <= KW_FORMATTED)||LA69_0==KW_FUNCTIONS||(LA69_0 >= KW_HOUR && LA69_0 <= KW_IDXPROPERTIES)||(LA69_0 >= KW_INDEX && LA69_0 <= KW_INDEXES)||(LA69_0 >= KW_INPATH && LA69_0 <= KW_INPUTFORMAT)||(LA69_0 >= KW_ISOLATION && LA69_0 <= KW_JAR)||(LA69_0 >= KW_KEY && LA69_0 <= KW_LAST)||LA69_0==KW_LEVEL||(LA69_0 >= KW_LIMIT && LA69_0 <= KW_LOAD)||(LA69_0 >= KW_LOCATION && LA69_0 <= KW_LONG)||LA69_0==KW_MANAGEMENT||(LA69_0 >= KW_MAPJOIN && LA69_0 <= KW_MATERIALIZED)||LA69_0==KW_METADATA||(LA69_0 >= KW_MINUTE && LA69_0 <= KW_MONTH)||(LA69_0 >= KW_MOVE && LA69_0 <= KW_MSCK)||(LA69_0 >= KW_NORELY && LA69_0 <= KW_NOSCAN)||LA69_0==KW_NOVALIDATE||LA69_0==KW_NULLS||LA69_0==KW_OFFSET||(LA69_0 >= KW_OPERATOR && LA69_0 <= KW_OPTION)||(LA69_0 >= KW_OUTPUTDRIVER && LA69_0 <= KW_OUTPUTFORMAT)||(LA69_0 >= KW_OVERWRITE && LA69_0 <= KW_OWNER)||(LA69_0 >= KW_PARTITIONED && LA69_0 <= KW_PATH)||(LA69_0 >= KW_PLAN && LA69_0 <= KW_POOL)||LA69_0==KW_PRINCIPALS||(LA69_0 >= KW_PURGE && LA69_0 <= KW_QUERY_PARALLELISM)||LA69_0==KW_READ||(LA69_0 >= KW_REBUILD && LA69_0 <= KW_RECORDWRITER)||(LA69_0 >= KW_RELOAD && LA69_0 <= KW_RESTRICT)||LA69_0==KW_REWRITE||(LA69_0 >= KW_ROLE && LA69_0 <= KW_ROLES)||(LA69_0 >= KW_SCHEDULING_POLICY && LA69_0 <= KW_SECOND)||(LA69_0 >= KW_SEMI && LA69_0 <= KW_SERVER)||(LA69_0 >= KW_SETS && LA69_0 <= KW_SKEWED)||(LA69_0 >= KW_SNAPSHOT && LA69_0 <= KW_SSL)||(LA69_0 >= KW_STATISTICS && LA69_0 <= KW_SUMMARY)||LA69_0==KW_TABLES||(LA69_0 >= KW_TBLPROPERTIES && LA69_0 <= KW_TERMINATED)||LA69_0==KW_TINYINT||(LA69_0 >= KW_TOUCH && LA69_0 <= KW_TRANSACTIONS)||LA69_0==KW_UNARCHIVE||LA69_0==KW_UNDO||LA69_0==KW_UNIONTYPE||(LA69_0 >= KW_UNLOCK && LA69_0 <= KW_UNSIGNED)||(LA69_0 >= KW_URI && LA69_0 <= KW_USE)||(LA69_0 >= KW_UTC && LA69_0 <= KW_VALIDATE)||LA69_0==KW_VALUE_TYPE||(LA69_0 >= KW_VECTORIZATION && LA69_0 <= KW_WEEK)||LA69_0==KW_WHILE||(LA69_0 >= KW_WORK && LA69_0 <= KW_ZONE)||LA69_0==KW_BATCH||LA69_0==KW_DAYOFWEEK||LA69_0==KW_HOLD_DDLTIME||LA69_0==KW_IGNORE||LA69_0==KW_NO_DROP||LA69_0==KW_OFFLINE||LA69_0==KW_PROTECTION||LA69_0==KW_READONLY||LA69_0==KW_TIMESTAMPTZ) ) {
				int LA69_2 = input.LA(2);
				if ( (LA69_2==KW_SET) ) {
					switch ( input.LA(3) ) {
					case KW_DBPROPERTIES:
						{
						alt69=1;
						}
						break;
					case KW_OWNER:
						{
						alt69=2;
						}
						break;
					case KW_LOCATION:
						{
						alt69=3;
						}
						break;
					default:
						if (state.backtracking>0) {state.failed=true; return retval;}
						int nvaeMark = input.mark();
						try {
							for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
								input.consume();
							}
							NoViableAltException nvae =
								new NoViableAltException("", 69, 4, input);
							throw nvae;
						} finally {
							input.rewind(nvaeMark);
						}
					}
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 69, 2, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 69, 0, input);
				throw nvae;
			}

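			// alt69 was predicted above from up to three tokens of lookahead: the
			// database name, KW_SET, and the discriminating keyword (DBPROPERTIES,
			// OWNER, or LOCATION). Descriptive comment added for readability; not
			// part of the ANTLR-generated output.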
			switch (alt69) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1189:7: alterDatabaseSuffixProperties
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterDatabaseSuffixProperties_in_alterDatabaseStatementSuffix4667);
					alterDatabaseSuffixProperties248=alterDatabaseSuffixProperties();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterDatabaseSuffixProperties248.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1190:7: alterDatabaseSuffixSetOwner
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterDatabaseSuffixSetOwner_in_alterDatabaseStatementSuffix4675);
					alterDatabaseSuffixSetOwner249=alterDatabaseSuffixSetOwner();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterDatabaseSuffixSetOwner249.getTree());

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1191:7: alterDatabaseSuffixSetLocation
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_alterDatabaseSuffixSetLocation_in_alterDatabaseStatementSuffix4683);
					alterDatabaseSuffixSetLocation250=alterDatabaseSuffixSetLocation();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterDatabaseSuffixSetLocation250.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterDatabaseStatementSuffix"


	public static class alterDatabaseSuffixProperties_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterDatabaseSuffixProperties"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1194:1: alterDatabaseSuffixProperties : name= identifier KW_SET KW_DBPROPERTIES dbProperties -> ^( TOK_ALTERDATABASE_PROPERTIES $name dbProperties ) ;
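	// Illustrative note (not ANTLR output): this rule matches the tail of a
	// statement such as
	//   ALTER DATABASE db1 SET DBPROPERTIES ('creator'='alice');
	// where the leading ALTER DATABASE keywords are consumed by the enclosing
	// rule, and rewrites it to ^(TOK_ALTERDATABASE_PROPERTIES $name dbProperties).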
	public final HiveParser.alterDatabaseSuffixProperties_return alterDatabaseSuffixProperties() throws RecognitionException {
		HiveParser.alterDatabaseSuffixProperties_return retval = new HiveParser.alterDatabaseSuffixProperties_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SET251=null;
		Token KW_DBPROPERTIES252=null;
		ParserRuleReturnScope name =null;
		ParserRuleReturnScope dbProperties253 =null;

		ASTNode KW_SET251_tree=null;
		ASTNode KW_DBPROPERTIES252_tree=null;
		RewriteRuleTokenStream stream_KW_DBPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_DBPROPERTIES");
		RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_dbProperties=new RewriteRuleSubtreeStream(adaptor,"rule dbProperties");

		 pushMsg("alter database properties statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1197:5: (name= identifier KW_SET KW_DBPROPERTIES dbProperties -> ^( TOK_ALTERDATABASE_PROPERTIES $name dbProperties ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1197:7: name= identifier KW_SET KW_DBPROPERTIES dbProperties
			{
			pushFollow(FOLLOW_identifier_in_alterDatabaseSuffixProperties4712);
			name=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(name.getTree());
			KW_SET251=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterDatabaseSuffixProperties4714); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET251);

			KW_DBPROPERTIES252=(Token)match(input,KW_DBPROPERTIES,FOLLOW_KW_DBPROPERTIES_in_alterDatabaseSuffixProperties4716); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DBPROPERTIES.add(KW_DBPROPERTIES252);

			pushFollow(FOLLOW_dbProperties_in_alterDatabaseSuffixProperties4718);
			dbProperties253=dbProperties();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_dbProperties.add(dbProperties253.getTree());
			// AST REWRITE
			// elements: name, dbProperties
			// token labels: 
			// rule labels: name, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_name=new RewriteRuleSubtreeStream(adaptor,"rule name",name!=null?name.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1198:5: -> ^( TOK_ALTERDATABASE_PROPERTIES $name dbProperties )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1198:8: ^( TOK_ALTERDATABASE_PROPERTIES $name dbProperties )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERDATABASE_PROPERTIES, "TOK_ALTERDATABASE_PROPERTIES"), root_1);
				adaptor.addChild(root_1, stream_name.nextTree());
				adaptor.addChild(root_1, stream_dbProperties.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterDatabaseSuffixProperties"


	public static class alterDatabaseSuffixSetOwner_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterDatabaseSuffixSetOwner"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1201:1: alterDatabaseSuffixSetOwner : dbName= identifier KW_SET KW_OWNER principalName -> ^( TOK_ALTERDATABASE_OWNER $dbName principalName ) ;
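	// Illustrative note (not ANTLR output): matches the tail of, e.g.,
	//   ALTER DATABASE db1 SET OWNER USER alice;
	// "USER alice" is consumed by the principalName subrule, and the result is
	// rewritten to ^(TOK_ALTERDATABASE_OWNER $dbName principalName).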
	public final HiveParser.alterDatabaseSuffixSetOwner_return alterDatabaseSuffixSetOwner() throws RecognitionException {
		HiveParser.alterDatabaseSuffixSetOwner_return retval = new HiveParser.alterDatabaseSuffixSetOwner_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SET254=null;
		Token KW_OWNER255=null;
		ParserRuleReturnScope dbName =null;
		ParserRuleReturnScope principalName256 =null;

		ASTNode KW_SET254_tree=null;
		ASTNode KW_OWNER255_tree=null;
		RewriteRuleTokenStream stream_KW_OWNER=new RewriteRuleTokenStream(adaptor,"token KW_OWNER");
		RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_principalName=new RewriteRuleSubtreeStream(adaptor,"rule principalName");

		 pushMsg("alter database set owner", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1204:5: (dbName= identifier KW_SET KW_OWNER principalName -> ^( TOK_ALTERDATABASE_OWNER $dbName principalName ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1204:7: dbName= identifier KW_SET KW_OWNER principalName
			{
			pushFollow(FOLLOW_identifier_in_alterDatabaseSuffixSetOwner4762);
			dbName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(dbName.getTree());
			KW_SET254=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterDatabaseSuffixSetOwner4764); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET254);

			KW_OWNER255=(Token)match(input,KW_OWNER,FOLLOW_KW_OWNER_in_alterDatabaseSuffixSetOwner4766); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_OWNER.add(KW_OWNER255);

			pushFollow(FOLLOW_principalName_in_alterDatabaseSuffixSetOwner4768);
			principalName256=principalName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_principalName.add(principalName256.getTree());
			// AST REWRITE
			// elements: principalName, dbName
			// token labels: 
			// rule labels: dbName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_dbName=new RewriteRuleSubtreeStream(adaptor,"rule dbName",dbName!=null?dbName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1205:5: -> ^( TOK_ALTERDATABASE_OWNER $dbName principalName )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1205:8: ^( TOK_ALTERDATABASE_OWNER $dbName principalName )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERDATABASE_OWNER, "TOK_ALTERDATABASE_OWNER"), root_1);
				adaptor.addChild(root_1, stream_dbName.nextTree());
				adaptor.addChild(root_1, stream_principalName.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterDatabaseSuffixSetOwner"


	public static class alterDatabaseSuffixSetLocation_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterDatabaseSuffixSetLocation"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1208:1: alterDatabaseSuffixSetLocation : dbName= identifier KW_SET KW_LOCATION newLocation= StringLiteral -> ^( TOK_ALTERDATABASE_LOCATION $dbName $newLocation) ;
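	// Illustrative note (not ANTLR output): matches the tail of, e.g.,
	//   ALTER DATABASE db1 SET LOCATION 'hdfs:///warehouse/db1.db';
	// the new location must be a single StringLiteral, and the result is
	// rewritten to ^(TOK_ALTERDATABASE_LOCATION $dbName $newLocation).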
	public final HiveParser.alterDatabaseSuffixSetLocation_return alterDatabaseSuffixSetLocation() throws RecognitionException {
		HiveParser.alterDatabaseSuffixSetLocation_return retval = new HiveParser.alterDatabaseSuffixSetLocation_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token newLocation=null;
		Token KW_SET257=null;
		Token KW_LOCATION258=null;
		ParserRuleReturnScope dbName =null;

		ASTNode newLocation_tree=null;
		ASTNode KW_SET257_tree=null;
		ASTNode KW_LOCATION258_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_LOCATION=new RewriteRuleTokenStream(adaptor,"token KW_LOCATION");
		RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");

		 pushMsg("alter database set location", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1211:5: (dbName= identifier KW_SET KW_LOCATION newLocation= StringLiteral -> ^( TOK_ALTERDATABASE_LOCATION $dbName $newLocation) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1211:7: dbName= identifier KW_SET KW_LOCATION newLocation= StringLiteral
			{
			pushFollow(FOLLOW_identifier_in_alterDatabaseSuffixSetLocation4812);
			dbName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(dbName.getTree());
			KW_SET257=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterDatabaseSuffixSetLocation4814); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET257);

			KW_LOCATION258=(Token)match(input,KW_LOCATION,FOLLOW_KW_LOCATION_in_alterDatabaseSuffixSetLocation4816); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_LOCATION.add(KW_LOCATION258);

			newLocation=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_alterDatabaseSuffixSetLocation4820); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(newLocation);

			// AST REWRITE
			// elements: newLocation, dbName
			// token labels: newLocation
			// rule labels: dbName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_newLocation=new RewriteRuleTokenStream(adaptor,"token newLocation",newLocation);
			RewriteRuleSubtreeStream stream_dbName=new RewriteRuleSubtreeStream(adaptor,"rule dbName",dbName!=null?dbName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1212:5: -> ^( TOK_ALTERDATABASE_LOCATION $dbName $newLocation)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1212:8: ^( TOK_ALTERDATABASE_LOCATION $dbName $newLocation)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERDATABASE_LOCATION, "TOK_ALTERDATABASE_LOCATION"), root_1);
				adaptor.addChild(root_1, stream_dbName.nextTree());
				adaptor.addChild(root_1, stream_newLocation.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterDatabaseSuffixSetLocation"


	public static class alterStatementSuffixRename_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixRename"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1215:1: alterStatementSuffixRename[boolean table] : KW_RENAME KW_TO tableName -> { table }? ^( TOK_ALTERTABLE_RENAME tableName ) -> ^( TOK_ALTERVIEW_RENAME tableName ) ;
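	// Illustrative note (not ANTLR output): this rule is shared by ALTER TABLE
	// and ALTER VIEW; the boolean argument selects the AST root, e.g.
	//   ALTER TABLE t1 RENAME TO t2;  =>  ^(TOK_ALTERTABLE_RENAME t2)
	//   ALTER VIEW  v1 RENAME TO v2;  =>  ^(TOK_ALTERVIEW_RENAME v2)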
	public final HiveParser.alterStatementSuffixRename_return alterStatementSuffixRename(boolean table) throws RecognitionException {
		HiveParser.alterStatementSuffixRename_return retval = new HiveParser.alterStatementSuffixRename_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_RENAME259=null;
		Token KW_TO260=null;
		ParserRuleReturnScope tableName261 =null;

		ASTNode KW_RENAME259_tree=null;
		ASTNode KW_TO260_tree=null;
		RewriteRuleTokenStream stream_KW_RENAME=new RewriteRuleTokenStream(adaptor,"token KW_RENAME");
		RewriteRuleTokenStream stream_KW_TO=new RewriteRuleTokenStream(adaptor,"token KW_TO");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		 pushMsg("rename statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1218:5: ( KW_RENAME KW_TO tableName -> { table }? ^( TOK_ALTERTABLE_RENAME tableName ) -> ^( TOK_ALTERVIEW_RENAME tableName ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1218:7: KW_RENAME KW_TO tableName
			{
			KW_RENAME259=(Token)match(input,KW_RENAME,FOLLOW_KW_RENAME_in_alterStatementSuffixRename4864); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_RENAME.add(KW_RENAME259);

			KW_TO260=(Token)match(input,KW_TO,FOLLOW_KW_TO_in_alterStatementSuffixRename4866); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TO.add(KW_TO260);

			pushFollow(FOLLOW_tableName_in_alterStatementSuffixRename4868);
			tableName261=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(tableName261.getTree());
			// AST REWRITE
			// elements: tableName, tableName
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1219:5: -> { table }? ^( TOK_ALTERTABLE_RENAME tableName )
			if ( table ) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1219:19: ^( TOK_ALTERTABLE_RENAME tableName )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_RENAME, "TOK_ALTERTABLE_RENAME"), root_1);
				adaptor.addChild(root_1, stream_tableName.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}

			else // 1220:5: -> ^( TOK_ALTERVIEW_RENAME tableName )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1220:19: ^( TOK_ALTERVIEW_RENAME tableName )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERVIEW_RENAME, "TOK_ALTERVIEW_RENAME"), root_1);
				adaptor.addChild(root_1, stream_tableName.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixRename"


	public static class alterStatementSuffixAddCol_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixAddCol"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1223:1: alterStatementSuffixAddCol : (add= KW_ADD |replace= KW_REPLACE ) KW_COLUMNS LPAREN columnNameTypeList RPAREN ( restrictOrCascade )? -> {$add != null}? ^( TOK_ALTERTABLE_ADDCOLS columnNameTypeList ( restrictOrCascade )? ) -> ^( TOK_ALTERTABLE_REPLACECOLS columnNameTypeList ( restrictOrCascade )? ) ;
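	// Illustrative note (not ANTLR output): one rule serves both ADD COLUMNS and
	// REPLACE COLUMNS; whichever label matched ($add or $replace) selects the
	// rewrite, e.g.
	//   ALTER TABLE t ADD COLUMNS (c1 INT, c2 STRING) CASCADE;
	// yields ^(TOK_ALTERTABLE_ADDCOLS columnNameTypeList restrictOrCascade).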
	public final HiveParser.alterStatementSuffixAddCol_return alterStatementSuffixAddCol() throws RecognitionException {
		HiveParser.alterStatementSuffixAddCol_return retval = new HiveParser.alterStatementSuffixAddCol_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token add=null;
		Token replace=null;
		Token KW_COLUMNS262=null;
		Token LPAREN263=null;
		Token RPAREN265=null;
		ParserRuleReturnScope columnNameTypeList264 =null;
		ParserRuleReturnScope restrictOrCascade266 =null;

		ASTNode add_tree=null;
		ASTNode replace_tree=null;
		ASTNode KW_COLUMNS262_tree=null;
		ASTNode LPAREN263_tree=null;
		ASTNode RPAREN265_tree=null;
		RewriteRuleTokenStream stream_KW_COLUMNS=new RewriteRuleTokenStream(adaptor,"token KW_COLUMNS");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleTokenStream stream_KW_REPLACE=new RewriteRuleTokenStream(adaptor,"token KW_REPLACE");
		RewriteRuleTokenStream stream_KW_ADD=new RewriteRuleTokenStream(adaptor,"token KW_ADD");
		RewriteRuleSubtreeStream stream_columnNameTypeList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameTypeList");
		RewriteRuleSubtreeStream stream_restrictOrCascade=new RewriteRuleSubtreeStream(adaptor,"rule restrictOrCascade");

		 pushMsg("add column statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1226:5: ( (add= KW_ADD |replace= KW_REPLACE ) KW_COLUMNS LPAREN columnNameTypeList RPAREN ( restrictOrCascade )? -> {$add != null}? ^( TOK_ALTERTABLE_ADDCOLS columnNameTypeList ( restrictOrCascade )? ) -> ^( TOK_ALTERTABLE_REPLACECOLS columnNameTypeList ( restrictOrCascade )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1226:7: (add= KW_ADD |replace= KW_REPLACE ) KW_COLUMNS LPAREN columnNameTypeList RPAREN ( restrictOrCascade )?
			{
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1226:7: (add= KW_ADD |replace= KW_REPLACE )
			int alt70=2;
			int LA70_0 = input.LA(1);
			if ( (LA70_0==KW_ADD) ) {
				alt70=1;
			}
			else if ( (LA70_0==KW_REPLACE) ) {
				alt70=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 70, 0, input);
				throw nvae;
			}

			switch (alt70) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1226:8: add= KW_ADD
					{
					add=(Token)match(input,KW_ADD,FOLLOW_KW_ADD_in_alterStatementSuffixAddCol4935); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ADD.add(add);

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1226:21: replace= KW_REPLACE
					{
					replace=(Token)match(input,KW_REPLACE,FOLLOW_KW_REPLACE_in_alterStatementSuffixAddCol4941); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_REPLACE.add(replace);

					}
					break;

			}

			KW_COLUMNS262=(Token)match(input,KW_COLUMNS,FOLLOW_KW_COLUMNS_in_alterStatementSuffixAddCol4944); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_COLUMNS.add(KW_COLUMNS262);

			LPAREN263=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_alterStatementSuffixAddCol4946); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN263);

			pushFollow(FOLLOW_columnNameTypeList_in_alterStatementSuffixAddCol4948);
			columnNameTypeList264=columnNameTypeList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnNameTypeList.add(columnNameTypeList264.getTree());
			RPAREN265=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_alterStatementSuffixAddCol4950); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN265);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1226:85: ( restrictOrCascade )?
			int alt71=2;
			int LA71_0 = input.LA(1);
			if ( (LA71_0==KW_CASCADE||LA71_0==KW_RESTRICT) ) {
				alt71=1;
			}
			switch (alt71) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1226:85: restrictOrCascade
					{
					pushFollow(FOLLOW_restrictOrCascade_in_alterStatementSuffixAddCol4952);
					restrictOrCascade266=restrictOrCascade();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_restrictOrCascade.add(restrictOrCascade266.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: restrictOrCascade, restrictOrCascade, columnNameTypeList, columnNameTypeList
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1227:5: -> {$add != null}? ^( TOK_ALTERTABLE_ADDCOLS columnNameTypeList ( restrictOrCascade )? )
			if (add != null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1227:24: ^( TOK_ALTERTABLE_ADDCOLS columnNameTypeList ( restrictOrCascade )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_ADDCOLS, "TOK_ALTERTABLE_ADDCOLS"), root_1);
				adaptor.addChild(root_1, stream_columnNameTypeList.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1227:68: ( restrictOrCascade )?
				if ( stream_restrictOrCascade.hasNext() ) {
					adaptor.addChild(root_1, stream_restrictOrCascade.nextTree());
				}
				stream_restrictOrCascade.reset();

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 1228:5: -> ^( TOK_ALTERTABLE_REPLACECOLS columnNameTypeList ( restrictOrCascade )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1228:24: ^( TOK_ALTERTABLE_REPLACECOLS columnNameTypeList ( restrictOrCascade )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_REPLACECOLS, "TOK_ALTERTABLE_REPLACECOLS"), root_1);
				adaptor.addChild(root_1, stream_columnNameTypeList.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1228:72: ( restrictOrCascade )?
				if ( stream_restrictOrCascade.hasNext() ) {
					adaptor.addChild(root_1, stream_restrictOrCascade.nextTree());
				}
				stream_restrictOrCascade.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixAddCol"


	public static class alterStatementSuffixAddConstraint_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixAddConstraint"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1231:1: alterStatementSuffixAddConstraint : KW_ADD (fk= alterForeignKeyWithName | alterConstraintWithName ) -> {fk != null}? ^( TOK_ALTERTABLE_ADDCONSTRAINT alterForeignKeyWithName ) -> ^( TOK_ALTERTABLE_ADDCONSTRAINT alterConstraintWithName ) ;
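	// Illustrative note (not ANTLR output): the keyword following the constraint
	// name (KW_FOREIGN vs. KW_PRIMARY/KW_UNIQUE/KW_CHECK, at three tokens of
	// lookahead) selects the alternative, e.g.
	//   ALTER TABLE t ADD CONSTRAINT fk1 FOREIGN KEY (c1) REFERENCES p (k1) ...;
	// Both alternatives rewrite to ^(TOK_ALTERTABLE_ADDCONSTRAINT ...).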
	public final HiveParser.alterStatementSuffixAddConstraint_return alterStatementSuffixAddConstraint() throws RecognitionException {
		HiveParser.alterStatementSuffixAddConstraint_return retval = new HiveParser.alterStatementSuffixAddConstraint_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ADD267=null;
		ParserRuleReturnScope fk =null;
		ParserRuleReturnScope alterConstraintWithName268 =null;

		ASTNode KW_ADD267_tree=null;
		RewriteRuleTokenStream stream_KW_ADD=new RewriteRuleTokenStream(adaptor,"token KW_ADD");
		RewriteRuleSubtreeStream stream_alterForeignKeyWithName=new RewriteRuleSubtreeStream(adaptor,"rule alterForeignKeyWithName");
		RewriteRuleSubtreeStream stream_alterConstraintWithName=new RewriteRuleSubtreeStream(adaptor,"rule alterConstraintWithName");

		 pushMsg("add constraint statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1234:4: ( KW_ADD (fk= alterForeignKeyWithName | alterConstraintWithName ) -> {fk != null}? ^( TOK_ALTERTABLE_ADDCONSTRAINT alterForeignKeyWithName ) -> ^( TOK_ALTERTABLE_ADDCONSTRAINT alterConstraintWithName ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1234:7: KW_ADD (fk= alterForeignKeyWithName | alterConstraintWithName )
			{
			KW_ADD267=(Token)match(input,KW_ADD,FOLLOW_KW_ADD_in_alterStatementSuffixAddConstraint5028); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ADD.add(KW_ADD267);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1234:14: (fk= alterForeignKeyWithName | alterConstraintWithName )
			int alt72=2;
			int LA72_0 = input.LA(1);
			if ( (LA72_0==KW_CONSTRAINT) ) {
				int LA72_1 = input.LA(2);
				if ( (LA72_1==Identifier) ) {
					int LA72_2 = input.LA(3);
					if ( (LA72_2==KW_FOREIGN) ) {
						alt72=1;
					}
					else if ( (LA72_2==KW_CHECK||LA72_2==KW_PRIMARY||LA72_2==KW_UNIQUE) ) {
						alt72=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						int nvaeMark = input.mark();
						try {
							for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
								input.consume();
							}
							NoViableAltException nvae =
								new NoViableAltException("", 72, 2, input);
							throw nvae;
						} finally {
							input.rewind(nvaeMark);
						}
					}

				}
				else if ( ((LA72_1 >= KW_ABORT && LA72_1 <= KW_AFTER)||LA72_1==KW_ALLOC_FRACTION||LA72_1==KW_ANALYZE||LA72_1==KW_ARCHIVE||LA72_1==KW_ASC||(LA72_1 >= KW_AUTOCOMMIT && LA72_1 <= KW_BEFORE)||(LA72_1 >= KW_BUCKET && LA72_1 <= KW_BUCKETS)||(LA72_1 >= KW_CACHE && LA72_1 <= KW_CASCADE)||LA72_1==KW_CHANGE||(LA72_1 >= KW_CHECK && LA72_1 <= KW_COLLECTION)||(LA72_1 >= KW_COLUMNS && LA72_1 <= KW_COMMENT)||(LA72_1 >= KW_COMPACT && LA72_1 <= KW_CONCATENATE)||LA72_1==KW_CONTINUE||LA72_1==KW_DATA||LA72_1==KW_DATABASES||(LA72_1 >= KW_DATETIME && LA72_1 <= KW_DBPROPERTIES)||(LA72_1 >= KW_DEFAULT && LA72_1 <= KW_DEFINED)||(LA72_1 >= KW_DELIMITED && LA72_1 <= KW_DESC)||(LA72_1 >= KW_DETAIL && LA72_1 <= KW_DISABLE)||(LA72_1 >= KW_DISTRIBUTE && LA72_1 <= KW_DO)||LA72_1==KW_DOW||(LA72_1 >= KW_DUMP && LA72_1 <= KW_ELEM_TYPE)||LA72_1==KW_ENABLE||(LA72_1 >= KW_ENFORCED && LA72_1 <= KW_ESCAPED)||LA72_1==KW_EXCLUSIVE||(LA72_1 >= KW_EXPLAIN && LA72_1 <= KW_EXPRESSION)||(LA72_1 >= KW_FIELDS && LA72_1 <= KW_FIRST)||(LA72_1 >= KW_FORMAT && LA72_1 <= KW_FORMATTED)||LA72_1==KW_FUNCTIONS||(LA72_1 >= KW_HOUR && LA72_1 <= KW_IDXPROPERTIES)||(LA72_1 >= KW_INDEX && LA72_1 <= KW_INDEXES)||(LA72_1 >= KW_INPATH && LA72_1 <= KW_INPUTFORMAT)||(LA72_1 >= KW_ISOLATION && LA72_1 <= KW_JAR)||(LA72_1 >= KW_KEY && LA72_1 <= KW_LAST)||LA72_1==KW_LEVEL||(LA72_1 >= KW_LIMIT && LA72_1 <= KW_LOAD)||(LA72_1 >= KW_LOCATION && LA72_1 <= KW_LONG)||LA72_1==KW_MANAGEMENT||(LA72_1 >= KW_MAPJOIN && LA72_1 <= KW_MATERIALIZED)||LA72_1==KW_METADATA||(LA72_1 >= KW_MINUTE && LA72_1 <= KW_MONTH)||(LA72_1 >= KW_MOVE && LA72_1 <= KW_MSCK)||(LA72_1 >= KW_NORELY && LA72_1 <= KW_NOSCAN)||LA72_1==KW_NOVALIDATE||LA72_1==KW_NULLS||LA72_1==KW_OFFSET||(LA72_1 >= KW_OPERATOR && LA72_1 <= KW_OPTION)||(LA72_1 >= KW_OUTPUTDRIVER && LA72_1 <= KW_OUTPUTFORMAT)||(LA72_1 >= KW_OVERWRITE && LA72_1 <= KW_OWNER)||(LA72_1 >= KW_PARTITIONED && LA72_1 <= KW_PATH)||(LA72_1 >= KW_PLAN && LA72_1 <= KW_POOL)||LA72_1==KW_PRINCIPALS||(LA72_1 >= KW_PURGE && LA72_1 <= KW_QUERY_PARALLELISM)||LA72_1==KW_READ||(LA72_1 >= KW_REBUILD && LA72_1 <= KW_RECORDWRITER)||(LA72_1 >= KW_RELOAD && LA72_1 <= KW_RESTRICT)||LA72_1==KW_REWRITE||(LA72_1 >= KW_ROLE && LA72_1 <= KW_ROLES)||(LA72_1 >= KW_SCHEDULING_POLICY && LA72_1 <= KW_SECOND)||(LA72_1 >= KW_SEMI && LA72_1 <= KW_SERVER)||(LA72_1 >= KW_SETS && LA72_1 <= KW_SKEWED)||(LA72_1 >= KW_SNAPSHOT && LA72_1 <= KW_SSL)||(LA72_1 >= KW_STATISTICS && LA72_1 <= KW_SUMMARY)||LA72_1==KW_TABLES||(LA72_1 >= KW_TBLPROPERTIES && LA72_1 <= KW_TERMINATED)||LA72_1==KW_TINYINT||(LA72_1 >= KW_TOUCH && LA72_1 <= KW_TRANSACTIONS)||LA72_1==KW_UNARCHIVE||LA72_1==KW_UNDO||LA72_1==KW_UNIONTYPE||(LA72_1 >= KW_UNLOCK && LA72_1 <= KW_UNSIGNED)||(LA72_1 >= KW_URI && LA72_1 <= KW_USE)||(LA72_1 >= KW_UTC && LA72_1 <= KW_VALIDATE)||LA72_1==KW_VALUE_TYPE||(LA72_1 >= KW_VECTORIZATION && LA72_1 <= KW_WEEK)||LA72_1==KW_WHILE||(LA72_1 >= KW_WORK && LA72_1 <= KW_ZONE)||LA72_1==KW_BATCH||LA72_1==KW_DAYOFWEEK||LA72_1==KW_HOLD_DDLTIME||LA72_1==KW_IGNORE||LA72_1==KW_NO_DROP||LA72_1==KW_OFFLINE||LA72_1==KW_PROTECTION||LA72_1==KW_READONLY||LA72_1==KW_TIMESTAMPTZ) ) {
					int LA72_3 = input.LA(3);
					if ( (LA72_3==KW_FOREIGN) ) {
						alt72=1;
					}
					else if ( (LA72_3==KW_CHECK||LA72_3==KW_PRIMARY||LA72_3==KW_UNIQUE) ) {
						alt72=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						int nvaeMark = input.mark();
						try {
							for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
								input.consume();
							}
							NoViableAltException nvae =
								new NoViableAltException("", 72, 3, input);
							throw nvae;
						} finally {
							input.rewind(nvaeMark);
						}
					}

				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 72, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 72, 0, input);
				throw nvae;
			}

			switch (alt72) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1234:15: fk= alterForeignKeyWithName
					{
					pushFollow(FOLLOW_alterForeignKeyWithName_in_alterStatementSuffixAddConstraint5033);
					fk=alterForeignKeyWithName();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_alterForeignKeyWithName.add(fk.getTree());
					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1234:44: alterConstraintWithName
					{
					pushFollow(FOLLOW_alterConstraintWithName_in_alterStatementSuffixAddConstraint5037);
					alterConstraintWithName268=alterConstraintWithName();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_alterConstraintWithName.add(alterConstraintWithName268.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: alterConstraintWithName, alterForeignKeyWithName
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1235:4: -> {fk != null}? ^( TOK_ALTERTABLE_ADDCONSTRAINT alterForeignKeyWithName )
			if (fk != null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1235:21: ^( TOK_ALTERTABLE_ADDCONSTRAINT alterForeignKeyWithName )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_ADDCONSTRAINT, "TOK_ALTERTABLE_ADDCONSTRAINT"), root_1);
				adaptor.addChild(root_1, stream_alterForeignKeyWithName.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}

			else // 1236:4: -> ^( TOK_ALTERTABLE_ADDCONSTRAINT alterConstraintWithName )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1236:21: ^( TOK_ALTERTABLE_ADDCONSTRAINT alterConstraintWithName )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_ADDCONSTRAINT, "TOK_ALTERTABLE_ADDCONSTRAINT"), root_1);
				adaptor.addChild(root_1, stream_alterConstraintWithName.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixAddConstraint"


	public static class alterStatementSuffixUpdateColumns_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixUpdateColumns"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1239:1: alterStatementSuffixUpdateColumns : KW_UPDATE KW_COLUMNS ( restrictOrCascade )? -> ^( TOK_ALTERTABLE_UPDATECOLUMNS ( restrictOrCascade )? ) ;
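	// Illustrative note (not ANTLR output): matches the tail of, e.g.,
	//   ALTER TABLE t UPDATE COLUMNS CASCADE;
	// no column list is taken, so TOK_ALTERTABLE_UPDATECOLUMNS carries only the
	// optional restrictOrCascade child.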
	public final HiveParser.alterStatementSuffixUpdateColumns_return alterStatementSuffixUpdateColumns() throws RecognitionException {
		HiveParser.alterStatementSuffixUpdateColumns_return retval = new HiveParser.alterStatementSuffixUpdateColumns_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_UPDATE269=null;
		Token KW_COLUMNS270=null;
		ParserRuleReturnScope restrictOrCascade271 =null;

		ASTNode KW_UPDATE269_tree=null;
		ASTNode KW_COLUMNS270_tree=null;
		RewriteRuleTokenStream stream_KW_COLUMNS=new RewriteRuleTokenStream(adaptor,"token KW_COLUMNS");
		RewriteRuleTokenStream stream_KW_UPDATE=new RewriteRuleTokenStream(adaptor,"token KW_UPDATE");
		RewriteRuleSubtreeStream stream_restrictOrCascade=new RewriteRuleSubtreeStream(adaptor,"rule restrictOrCascade");

		 pushMsg("update columns statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1242:5: ( KW_UPDATE KW_COLUMNS ( restrictOrCascade )? -> ^( TOK_ALTERTABLE_UPDATECOLUMNS ( restrictOrCascade )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1242:7: KW_UPDATE KW_COLUMNS ( restrictOrCascade )?
			{
			KW_UPDATE269=(Token)match(input,KW_UPDATE,FOLLOW_KW_UPDATE_in_alterStatementSuffixUpdateColumns5102); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_UPDATE.add(KW_UPDATE269);

			KW_COLUMNS270=(Token)match(input,KW_COLUMNS,FOLLOW_KW_COLUMNS_in_alterStatementSuffixUpdateColumns5104); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_COLUMNS.add(KW_COLUMNS270);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1242:28: ( restrictOrCascade )?
			int alt73=2;
			int LA73_0 = input.LA(1);
			if ( (LA73_0==KW_CASCADE||LA73_0==KW_RESTRICT) ) {
				alt73=1;
			}
			switch (alt73) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1242:28: restrictOrCascade
					{
					pushFollow(FOLLOW_restrictOrCascade_in_alterStatementSuffixUpdateColumns5106);
					restrictOrCascade271=restrictOrCascade();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_restrictOrCascade.add(restrictOrCascade271.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: restrictOrCascade
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1243:5: -> ^( TOK_ALTERTABLE_UPDATECOLUMNS ( restrictOrCascade )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1243:8: ^( TOK_ALTERTABLE_UPDATECOLUMNS ( restrictOrCascade )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_UPDATECOLUMNS, "TOK_ALTERTABLE_UPDATECOLUMNS"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1243:39: ( restrictOrCascade )?
				if ( stream_restrictOrCascade.hasNext() ) {
					adaptor.addChild(root_1, stream_restrictOrCascade.nextTree());
				}
				stream_restrictOrCascade.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixUpdateColumns"


	public static class alterStatementSuffixDropConstraint_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixDropConstraint"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1246:1: alterStatementSuffixDropConstraint : KW_DROP KW_CONSTRAINT cName= identifier -> ^( TOK_ALTERTABLE_DROPCONSTRAINT $cName) ;
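	// Illustrative note (not ANTLR output): matches the tail of, e.g.,
	//   ALTER TABLE t DROP CONSTRAINT pk1;
	// rewritten to ^(TOK_ALTERTABLE_DROPCONSTRAINT $cName).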
	public final HiveParser.alterStatementSuffixDropConstraint_return alterStatementSuffixDropConstraint() throws RecognitionException {
		HiveParser.alterStatementSuffixDropConstraint_return retval = new HiveParser.alterStatementSuffixDropConstraint_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_DROP272=null;
		Token KW_CONSTRAINT273=null;
		ParserRuleReturnScope cName =null;

		ASTNode KW_DROP272_tree=null;
		ASTNode KW_CONSTRAINT273_tree=null;
		RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
		RewriteRuleTokenStream stream_KW_CONSTRAINT=new RewriteRuleTokenStream(adaptor,"token KW_CONSTRAINT");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");

		 pushMsg("drop constraint statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1249:4: ( KW_DROP KW_CONSTRAINT cName= identifier -> ^( TOK_ALTERTABLE_DROPCONSTRAINT $cName) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1249:6: KW_DROP KW_CONSTRAINT cName= identifier
			{
			KW_DROP272=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_alterStatementSuffixDropConstraint5146); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DROP.add(KW_DROP272);

			KW_CONSTRAINT273=(Token)match(input,KW_CONSTRAINT,FOLLOW_KW_CONSTRAINT_in_alterStatementSuffixDropConstraint5148); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_CONSTRAINT.add(KW_CONSTRAINT273);

			pushFollow(FOLLOW_identifier_in_alterStatementSuffixDropConstraint5152);
			cName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(cName.getTree());
			// AST REWRITE
			// elements: cName
			// token labels: 
			// rule labels: cName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_cName=new RewriteRuleSubtreeStream(adaptor,"rule cName",cName!=null?cName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1250:4: -> ^( TOK_ALTERTABLE_DROPCONSTRAINT $cName)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1250:6: ^( TOK_ALTERTABLE_DROPCONSTRAINT $cName)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_DROPCONSTRAINT, "TOK_ALTERTABLE_DROPCONSTRAINT"), root_1);
				adaptor.addChild(root_1, stream_cName.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixDropConstraint"


	public static class alterStatementSuffixRenameCol_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixRenameCol"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1253:1: alterStatementSuffixRenameCol : KW_CHANGE ( KW_COLUMN )? oldName= identifier newName= identifier colType ( alterColumnConstraint[$newName.tree] )? ( KW_COMMENT comment= StringLiteral )? ( alterStatementChangeColPosition )? ( restrictOrCascade )? -> ^( TOK_ALTERTABLE_RENAMECOL $oldName $newName colType ( $comment)? ( alterColumnConstraint )? ( alterStatementChangeColPosition )? ( restrictOrCascade )? ) ;
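	// Illustrative note (not ANTLR output): CHANGE covers rename, retype,
	// comment, reposition, and column constraints in a single rule, e.g.
	//   ALTER TABLE t CHANGE COLUMN c1 c2 STRING COMMENT 'renamed' AFTER c0 CASCADE;
	// every optional piece becomes an optional child of TOK_ALTERTABLE_RENAMECOL.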
	public final HiveParser.alterStatementSuffixRenameCol_return alterStatementSuffixRenameCol() throws RecognitionException {
		HiveParser.alterStatementSuffixRenameCol_return retval = new HiveParser.alterStatementSuffixRenameCol_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token comment=null;
		Token KW_CHANGE274=null;
		Token KW_COLUMN275=null;
		Token KW_COMMENT278=null;
		ParserRuleReturnScope oldName =null;
		ParserRuleReturnScope newName =null;
		ParserRuleReturnScope colType276 =null;
		ParserRuleReturnScope alterColumnConstraint277 =null;
		ParserRuleReturnScope alterStatementChangeColPosition279 =null;
		ParserRuleReturnScope restrictOrCascade280 =null;

		ASTNode comment_tree=null;
		ASTNode KW_CHANGE274_tree=null;
		ASTNode KW_COLUMN275_tree=null;
		ASTNode KW_COMMENT278_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");
		RewriteRuleTokenStream stream_KW_COLUMN=new RewriteRuleTokenStream(adaptor,"token KW_COLUMN");
		RewriteRuleTokenStream stream_KW_CHANGE=new RewriteRuleTokenStream(adaptor,"token KW_CHANGE");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_colType=new RewriteRuleSubtreeStream(adaptor,"rule colType");
		RewriteRuleSubtreeStream stream_alterStatementChangeColPosition=new RewriteRuleSubtreeStream(adaptor,"rule alterStatementChangeColPosition");
		RewriteRuleSubtreeStream stream_restrictOrCascade=new RewriteRuleSubtreeStream(adaptor,"rule restrictOrCascade");
		RewriteRuleSubtreeStream stream_alterColumnConstraint=new RewriteRuleSubtreeStream(adaptor,"rule alterColumnConstraint");

		 pushMsg("rename column name", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1256:5: ( KW_CHANGE ( KW_COLUMN )? oldName= identifier newName= identifier colType ( alterColumnConstraint[$newName.tree] )? ( KW_COMMENT comment= StringLiteral )? ( alterStatementChangeColPosition )? ( restrictOrCascade )? -> ^( TOK_ALTERTABLE_RENAMECOL $oldName $newName colType ( $comment)? ( alterColumnConstraint )? ( alterStatementChangeColPosition )? ( restrictOrCascade )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1256:7: KW_CHANGE ( KW_COLUMN )? oldName= identifier newName= identifier colType ( alterColumnConstraint[$newName.tree] )? ( KW_COMMENT comment= StringLiteral )? ( alterStatementChangeColPosition )? ( restrictOrCascade )?
			{
			KW_CHANGE274=(Token)match(input,KW_CHANGE,FOLLOW_KW_CHANGE_in_alterStatementSuffixRenameCol5189); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_CHANGE.add(KW_CHANGE274);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1256:17: ( KW_COLUMN )?
			int alt74=2;
			int LA74_0 = input.LA(1);
			if ( (LA74_0==KW_COLUMN) ) {
				alt74=1;
			}
			switch (alt74) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1256:17: KW_COLUMN
					{
					KW_COLUMN275=(Token)match(input,KW_COLUMN,FOLLOW_KW_COLUMN_in_alterStatementSuffixRenameCol5191); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_COLUMN.add(KW_COLUMN275);

					}
					break;

			}

			pushFollow(FOLLOW_identifier_in_alterStatementSuffixRenameCol5196);
			oldName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(oldName.getTree());
			pushFollow(FOLLOW_identifier_in_alterStatementSuffixRenameCol5200);
			newName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(newName.getTree());
			pushFollow(FOLLOW_colType_in_alterStatementSuffixRenameCol5202);
			colType276=colType();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_colType.add(colType276.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1256:74: ( alterColumnConstraint[$newName.tree] )?
			int alt75=2;
			int LA75_0 = input.LA(1);
			if ( (LA75_0==KW_CHECK||LA75_0==KW_CONSTRAINT||LA75_0==KW_DEFAULT||LA75_0==KW_NOT||LA75_0==KW_PRIMARY||LA75_0==KW_REFERENCES||LA75_0==KW_UNIQUE) ) {
				alt75=1;
			}
			switch (alt75) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1256:74: alterColumnConstraint[$newName.tree]
					{
					pushFollow(FOLLOW_alterColumnConstraint_in_alterStatementSuffixRenameCol5204);
					alterColumnConstraint277=alterColumnConstraint((newName!=null?((ASTNode)newName.getTree()):null));
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_alterColumnConstraint.add(alterColumnConstraint277.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1256:112: ( KW_COMMENT comment= StringLiteral )?
			int alt76=2;
			int LA76_0 = input.LA(1);
			if ( (LA76_0==KW_COMMENT) ) {
				alt76=1;
			}
			switch (alt76) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1256:113: KW_COMMENT comment= StringLiteral
					{
					KW_COMMENT278=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_alterStatementSuffixRenameCol5209); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_COMMENT.add(KW_COMMENT278);

					comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_alterStatementSuffixRenameCol5213); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(comment);

					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1256:148: ( alterStatementChangeColPosition )?
			int alt77=2;
			int LA77_0 = input.LA(1);
			if ( (LA77_0==KW_AFTER||LA77_0==KW_FIRST) ) {
				alt77=1;
			}
			switch (alt77) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1256:148: alterStatementChangeColPosition
					{
					pushFollow(FOLLOW_alterStatementChangeColPosition_in_alterStatementSuffixRenameCol5217);
					alterStatementChangeColPosition279=alterStatementChangeColPosition();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_alterStatementChangeColPosition.add(alterStatementChangeColPosition279.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1256:181: ( restrictOrCascade )?
			int alt78=2;
			int LA78_0 = input.LA(1);
			if ( (LA78_0==KW_CASCADE||LA78_0==KW_RESTRICT) ) {
				alt78=1;
			}
			switch (alt78) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1256:181: restrictOrCascade
					{
					pushFollow(FOLLOW_restrictOrCascade_in_alterStatementSuffixRenameCol5220);
					restrictOrCascade280=restrictOrCascade();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_restrictOrCascade.add(restrictOrCascade280.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: alterColumnConstraint, comment, alterStatementChangeColPosition, newName, oldName, colType, restrictOrCascade
			// token labels: comment
			// rule labels: newName, oldName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
			RewriteRuleSubtreeStream stream_newName=new RewriteRuleSubtreeStream(adaptor,"rule newName",newName!=null?newName.getTree():null);
			RewriteRuleSubtreeStream stream_oldName=new RewriteRuleSubtreeStream(adaptor,"rule oldName",oldName!=null?oldName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1257:5: -> ^( TOK_ALTERTABLE_RENAMECOL $oldName $newName colType ( $comment)? ( alterColumnConstraint )? ( alterStatementChangeColPosition )? ( restrictOrCascade )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1257:7: ^( TOK_ALTERTABLE_RENAMECOL $oldName $newName colType ( $comment)? ( alterColumnConstraint )? ( alterStatementChangeColPosition )? ( restrictOrCascade )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_RENAMECOL, "TOK_ALTERTABLE_RENAMECOL"), root_1);
				adaptor.addChild(root_1, stream_oldName.nextTree());
				adaptor.addChild(root_1, stream_newName.nextTree());
				adaptor.addChild(root_1, stream_colType.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1257:61: ( $comment)?
				if ( stream_comment.hasNext() ) {
					adaptor.addChild(root_1, stream_comment.nextNode());
				}
				stream_comment.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1257:70: ( alterColumnConstraint )?
				if ( stream_alterColumnConstraint.hasNext() ) {
					adaptor.addChild(root_1, stream_alterColumnConstraint.nextTree());
				}
				stream_alterColumnConstraint.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1257:93: ( alterStatementChangeColPosition )?
				if ( stream_alterStatementChangeColPosition.hasNext() ) {
					adaptor.addChild(root_1, stream_alterStatementChangeColPosition.nextTree());
				}
				stream_alterStatementChangeColPosition.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1257:126: ( restrictOrCascade )?
				if ( stream_restrictOrCascade.hasNext() ) {
					adaptor.addChild(root_1, stream_restrictOrCascade.nextTree());
				}
				stream_restrictOrCascade.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixRenameCol"


	public static class alterStatementSuffixUpdateStatsCol_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixUpdateStatsCol"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1260:1: alterStatementSuffixUpdateStatsCol : KW_UPDATE KW_STATISTICS KW_FOR ( KW_COLUMN )? colName= identifier KW_SET tableProperties ( KW_COMMENT comment= StringLiteral )? -> ^( TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties ( $comment)? ) ;
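	// Illustrative note (not ANTLR output): matches the tail of, e.g.,
	//   ALTER TABLE t UPDATE STATISTICS FOR COLUMN c1 SET ('numDVs'='100');
	// the key/value pairs are parsed by the ordinary tableProperties rule and
	// attached under TOK_ALTERTABLE_UPDATECOLSTATS.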
	public final HiveParser.alterStatementSuffixUpdateStatsCol_return alterStatementSuffixUpdateStatsCol() throws RecognitionException {
		HiveParser.alterStatementSuffixUpdateStatsCol_return retval = new HiveParser.alterStatementSuffixUpdateStatsCol_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token comment=null;
		Token KW_UPDATE281=null;
		Token KW_STATISTICS282=null;
		Token KW_FOR283=null;
		Token KW_COLUMN284=null;
		Token KW_SET285=null;
		Token KW_COMMENT287=null;
		ParserRuleReturnScope colName =null;
		ParserRuleReturnScope tableProperties286 =null;

		ASTNode comment_tree=null;
		ASTNode KW_UPDATE281_tree=null;
		ASTNode KW_STATISTICS282_tree=null;
		ASTNode KW_FOR283_tree=null;
		ASTNode KW_COLUMN284_tree=null;
		ASTNode KW_SET285_tree=null;
		ASTNode KW_COMMENT287_tree=null;
		RewriteRuleTokenStream stream_KW_STATISTICS=new RewriteRuleTokenStream(adaptor,"token KW_STATISTICS");
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_FOR=new RewriteRuleTokenStream(adaptor,"token KW_FOR");
		RewriteRuleTokenStream stream_KW_UPDATE=new RewriteRuleTokenStream(adaptor,"token KW_UPDATE");
		RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");
		RewriteRuleTokenStream stream_KW_COLUMN=new RewriteRuleTokenStream(adaptor,"token KW_COLUMN");
		RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_tableProperties=new RewriteRuleSubtreeStream(adaptor,"rule tableProperties");

		 pushMsg("update column statistics", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1263:5: ( KW_UPDATE KW_STATISTICS KW_FOR ( KW_COLUMN )? colName= identifier KW_SET tableProperties ( KW_COMMENT comment= StringLiteral )? -> ^( TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties ( $comment)? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1263:7: KW_UPDATE KW_STATISTICS KW_FOR ( KW_COLUMN )? colName= identifier KW_SET tableProperties ( KW_COMMENT comment= StringLiteral )?
			{
			KW_UPDATE281=(Token)match(input,KW_UPDATE,FOLLOW_KW_UPDATE_in_alterStatementSuffixUpdateStatsCol5278); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_UPDATE.add(KW_UPDATE281);

			KW_STATISTICS282=(Token)match(input,KW_STATISTICS,FOLLOW_KW_STATISTICS_in_alterStatementSuffixUpdateStatsCol5280); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_STATISTICS.add(KW_STATISTICS282);

			KW_FOR283=(Token)match(input,KW_FOR,FOLLOW_KW_FOR_in_alterStatementSuffixUpdateStatsCol5282); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FOR.add(KW_FOR283);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1263:38: ( KW_COLUMN )?
			int alt79=2;
			int LA79_0 = input.LA(1);
			if ( (LA79_0==KW_COLUMN) ) {
				alt79=1;
			}
			switch (alt79) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1263:38: KW_COLUMN
					{
					KW_COLUMN284=(Token)match(input,KW_COLUMN,FOLLOW_KW_COLUMN_in_alterStatementSuffixUpdateStatsCol5284); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_COLUMN.add(KW_COLUMN284);

					}
					break;

			}

			pushFollow(FOLLOW_identifier_in_alterStatementSuffixUpdateStatsCol5289);
			colName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(colName.getTree());
			KW_SET285=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterStatementSuffixUpdateStatsCol5291); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET285);

			pushFollow(FOLLOW_tableProperties_in_alterStatementSuffixUpdateStatsCol5293);
			tableProperties286=tableProperties();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableProperties.add(tableProperties286.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1263:91: ( KW_COMMENT comment= StringLiteral )?
			int alt80=2;
			int LA80_0 = input.LA(1);
			if ( (LA80_0==KW_COMMENT) ) {
				alt80=1;
			}
			switch (alt80) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1263:92: KW_COMMENT comment= StringLiteral
					{
					KW_COMMENT287=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_alterStatementSuffixUpdateStatsCol5296); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_COMMENT.add(KW_COMMENT287);

					comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_alterStatementSuffixUpdateStatsCol5300); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(comment);

					}
					break;

			}

			// AST REWRITE
			// elements: tableProperties, comment, colName
			// token labels: comment
			// rule labels: colName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
			RewriteRuleSubtreeStream stream_colName=new RewriteRuleSubtreeStream(adaptor,"rule colName",colName!=null?colName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1264:5: -> ^( TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties ( $comment)? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1264:7: ^( TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties ( $comment)? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_UPDATECOLSTATS, "TOK_ALTERTABLE_UPDATECOLSTATS"), root_1);
				adaptor.addChild(root_1, stream_colName.nextTree());
				adaptor.addChild(root_1, stream_tableProperties.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1264:65: ( $comment)?
				if ( stream_comment.hasNext() ) {
					adaptor.addChild(root_1, stream_comment.nextNode());
				}
				stream_comment.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// always runs before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixUpdateStatsCol"


	public static class alterStatementSuffixUpdateStats_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixUpdateStats"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1267:1: alterStatementSuffixUpdateStats : KW_UPDATE KW_STATISTICS KW_SET tableProperties -> ^( TOK_ALTERTABLE_UPDATESTATS tableProperties ) ;
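	// Illustrative suffix input (property names/values are placeholders):
	//   ... UPDATE STATISTICS SET ('numRows'='1000', 'rawDataSize'='4096');
	// Rewrites to ^(TOK_ALTERTABLE_UPDATESTATS tableProperties).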
	public final HiveParser.alterStatementSuffixUpdateStats_return alterStatementSuffixUpdateStats() throws RecognitionException {
		HiveParser.alterStatementSuffixUpdateStats_return retval = new HiveParser.alterStatementSuffixUpdateStats_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_UPDATE288=null;
		Token KW_STATISTICS289=null;
		Token KW_SET290=null;
		ParserRuleReturnScope tableProperties291 =null;

		ASTNode KW_UPDATE288_tree=null;
		ASTNode KW_STATISTICS289_tree=null;
		ASTNode KW_SET290_tree=null;
		RewriteRuleTokenStream stream_KW_STATISTICS=new RewriteRuleTokenStream(adaptor,"token KW_STATISTICS");
		RewriteRuleTokenStream stream_KW_UPDATE=new RewriteRuleTokenStream(adaptor,"token KW_UPDATE");
		RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
		RewriteRuleSubtreeStream stream_tableProperties=new RewriteRuleSubtreeStream(adaptor,"rule tableProperties");

		 pushMsg("update basic statistics", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1270:5: ( KW_UPDATE KW_STATISTICS KW_SET tableProperties -> ^( TOK_ALTERTABLE_UPDATESTATS tableProperties ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1270:7: KW_UPDATE KW_STATISTICS KW_SET tableProperties
			{
			KW_UPDATE288=(Token)match(input,KW_UPDATE,FOLLOW_KW_UPDATE_in_alterStatementSuffixUpdateStats5347); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_UPDATE.add(KW_UPDATE288);

			KW_STATISTICS289=(Token)match(input,KW_STATISTICS,FOLLOW_KW_STATISTICS_in_alterStatementSuffixUpdateStats5349); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_STATISTICS.add(KW_STATISTICS289);

			KW_SET290=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterStatementSuffixUpdateStats5351); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET290);

			pushFollow(FOLLOW_tableProperties_in_alterStatementSuffixUpdateStats5353);
			tableProperties291=tableProperties();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableProperties.add(tableProperties291.getTree());
			// AST REWRITE
			// elements: tableProperties
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1271:5: -> ^( TOK_ALTERTABLE_UPDATESTATS tableProperties )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1271:7: ^( TOK_ALTERTABLE_UPDATESTATS tableProperties )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_UPDATESTATS, "TOK_ALTERTABLE_UPDATESTATS"), root_1);
				adaptor.addChild(root_1, stream_tableProperties.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// always runs before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixUpdateStats"


	public static class alterStatementChangeColPosition_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementChangeColPosition"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1274:1: alterStatementChangeColPosition : (first= KW_FIRST | KW_AFTER afterCol= identifier -> {$first != null}? ^( TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION ) -> ^( TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION $afterCol) );
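	// Optional position clause of CHANGE COLUMN, e.g. (identifiers are placeholders):
	//   ... CHANGE COLUMN old_name new_name INT FIRST;
	//   ... CHANGE COLUMN old_name new_name INT AFTER other_col;
	// Note that in the generated code below, `first` is only assigned in the KW_FIRST
	// alternative, so the {$first != null}? rewrite branch inside the KW_AFTER
	// alternative is unreachable: AFTER always yields
	// ^(TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION $afterCol), while FIRST keeps the
	// bare KW_FIRST token as the result tree.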
	public final HiveParser.alterStatementChangeColPosition_return alterStatementChangeColPosition() throws RecognitionException {
		HiveParser.alterStatementChangeColPosition_return retval = new HiveParser.alterStatementChangeColPosition_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token first=null;
		Token KW_AFTER292=null;
		ParserRuleReturnScope afterCol =null;

		ASTNode first_tree=null;
		ASTNode KW_AFTER292_tree=null;
		RewriteRuleTokenStream stream_KW_AFTER=new RewriteRuleTokenStream(adaptor,"token KW_AFTER");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1275:5: (first= KW_FIRST | KW_AFTER afterCol= identifier -> {$first != null}? ^( TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION ) -> ^( TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION $afterCol) )
			int alt81=2;
			int LA81_0 = input.LA(1);
			if ( (LA81_0==KW_FIRST) ) {
				alt81=1;
			}
			else if ( (LA81_0==KW_AFTER) ) {
				alt81=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 81, 0, input);
				throw nvae;
			}

			switch (alt81) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1275:7: first= KW_FIRST
					{
					root_0 = (ASTNode)adaptor.nil();


					first=(Token)match(input,KW_FIRST,FOLLOW_KW_FIRST_in_alterStatementChangeColPosition5383); if (state.failed) return retval;
					if ( state.backtracking==0 ) {
					first_tree = (ASTNode)adaptor.create(first);
					adaptor.addChild(root_0, first_tree);
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1275:22: KW_AFTER afterCol= identifier
					{
					KW_AFTER292=(Token)match(input,KW_AFTER,FOLLOW_KW_AFTER_in_alterStatementChangeColPosition5385); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_AFTER.add(KW_AFTER292);

					pushFollow(FOLLOW_identifier_in_alterStatementChangeColPosition5389);
					afterCol=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(afterCol.getTree());
					// AST REWRITE
					// elements: afterCol
					// token labels: 
					// rule labels: afterCol, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_afterCol=new RewriteRuleSubtreeStream(adaptor,"rule afterCol",afterCol!=null?afterCol.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1276:5: -> {$first != null}? ^( TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION )
					if (first != null) {
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1276:25: ^( TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION, "TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}

					else // 1277:5: -> ^( TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION $afterCol)
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1277:8: ^( TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION $afterCol)
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION, "TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION"), root_1);
						adaptor.addChild(root_1, stream_afterCol.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// always runs before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "alterStatementChangeColPosition"


	public static class alterStatementSuffixAddPartitions_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixAddPartitions"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1280:1: alterStatementSuffixAddPartitions[boolean table] : KW_ADD ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ -> { table }? ^( TOK_ALTERTABLE_ADDPARTS ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ ) -> ^( TOK_ALTERVIEW_ADDPARTS ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ ) ;
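	// Illustrative suffix input (partition keys/values and paths are placeholders):
	//   ... ADD IF NOT EXISTS PARTITION (ds='2018-05-18') LOCATION '/warehouse/t/ds=2018-05-18'
	//       PARTITION (ds='2018-05-19');
	// With table==true the children are rooted under TOK_ALTERTABLE_ADDPARTS,
	// otherwise (ALTER VIEW) under TOK_ALTERVIEW_ADDPARTS.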
	public final HiveParser.alterStatementSuffixAddPartitions_return alterStatementSuffixAddPartitions(boolean table) throws RecognitionException {
		HiveParser.alterStatementSuffixAddPartitions_return retval = new HiveParser.alterStatementSuffixAddPartitions_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ADD293=null;
		ParserRuleReturnScope ifNotExists294 =null;
		ParserRuleReturnScope alterStatementSuffixAddPartitionsElement295 =null;

		ASTNode KW_ADD293_tree=null;
		RewriteRuleTokenStream stream_KW_ADD=new RewriteRuleTokenStream(adaptor,"token KW_ADD");
		RewriteRuleSubtreeStream stream_ifNotExists=new RewriteRuleSubtreeStream(adaptor,"rule ifNotExists");
		RewriteRuleSubtreeStream stream_alterStatementSuffixAddPartitionsElement=new RewriteRuleSubtreeStream(adaptor,"rule alterStatementSuffixAddPartitionsElement");

		 pushMsg("add partition statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1283:5: ( KW_ADD ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ -> { table }? ^( TOK_ALTERTABLE_ADDPARTS ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ ) -> ^( TOK_ALTERVIEW_ADDPARTS ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1283:7: KW_ADD ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+
			{
			KW_ADD293=(Token)match(input,KW_ADD,FOLLOW_KW_ADD_in_alterStatementSuffixAddPartitions5442); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ADD.add(KW_ADD293);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1283:14: ( ifNotExists )?
			int alt82=2;
			int LA82_0 = input.LA(1);
			if ( (LA82_0==KW_IF) ) {
				alt82=1;
			}
			switch (alt82) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1283:14: ifNotExists
					{
					pushFollow(FOLLOW_ifNotExists_in_alterStatementSuffixAddPartitions5444);
					ifNotExists294=ifNotExists();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_ifNotExists.add(ifNotExists294.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1283:27: ( alterStatementSuffixAddPartitionsElement )+
			int cnt83=0;
			loop83:
			while (true) {
				int alt83=2;
				int LA83_0 = input.LA(1);
				if ( (LA83_0==KW_PARTITION) ) {
					alt83=1;
				}

				switch (alt83) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1283:27: alterStatementSuffixAddPartitionsElement
					{
					pushFollow(FOLLOW_alterStatementSuffixAddPartitionsElement_in_alterStatementSuffixAddPartitions5447);
					alterStatementSuffixAddPartitionsElement295=alterStatementSuffixAddPartitionsElement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_alterStatementSuffixAddPartitionsElement.add(alterStatementSuffixAddPartitionsElement295.getTree());
					}
					break;

				default :
					if ( cnt83 >= 1 ) break loop83;
					if (state.backtracking>0) {state.failed=true; return retval;}
					EarlyExitException eee = new EarlyExitException(83, input);
					throw eee;
				}
				cnt83++;
			}

			// AST REWRITE
			// elements: alterStatementSuffixAddPartitionsElement, ifNotExists, ifNotExists, alterStatementSuffixAddPartitionsElement
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1284:5: -> { table }? ^( TOK_ALTERTABLE_ADDPARTS ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ )
			if ( table ) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1284:19: ^( TOK_ALTERTABLE_ADDPARTS ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_ADDPARTS, "TOK_ALTERTABLE_ADDPARTS"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1284:45: ( ifNotExists )?
				if ( stream_ifNotExists.hasNext() ) {
					adaptor.addChild(root_1, stream_ifNotExists.nextTree());
				}
				stream_ifNotExists.reset();

				if ( !(stream_alterStatementSuffixAddPartitionsElement.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_alterStatementSuffixAddPartitionsElement.hasNext() ) {
					adaptor.addChild(root_1, stream_alterStatementSuffixAddPartitionsElement.nextTree());
				}
				stream_alterStatementSuffixAddPartitionsElement.reset();

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 1285:5: -> ^( TOK_ALTERVIEW_ADDPARTS ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1285:19: ^( TOK_ALTERVIEW_ADDPARTS ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERVIEW_ADDPARTS, "TOK_ALTERVIEW_ADDPARTS"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1285:44: ( ifNotExists )?
				if ( stream_ifNotExists.hasNext() ) {
					adaptor.addChild(root_1, stream_ifNotExists.nextTree());
				}
				stream_ifNotExists.reset();

				if ( !(stream_alterStatementSuffixAddPartitionsElement.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_alterStatementSuffixAddPartitionsElement.hasNext() ) {
					adaptor.addChild(root_1, stream_alterStatementSuffixAddPartitionsElement.nextTree());
				}
				stream_alterStatementSuffixAddPartitionsElement.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// always runs before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixAddPartitions"


	public static class alterStatementSuffixAddPartitionsElement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixAddPartitionsElement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1288:1: alterStatementSuffixAddPartitionsElement : partitionSpec ( partitionLocation )? ;
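	// One element of the ADD PARTITION list above: a partitionSpec with an optional
	// location, e.g. (values are placeholders):
	//   PARTITION (ds='2018-05-18') LOCATION '/warehouse/t/ds=2018-05-18'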
	public final HiveParser.alterStatementSuffixAddPartitionsElement_return alterStatementSuffixAddPartitionsElement() throws RecognitionException {
		HiveParser.alterStatementSuffixAddPartitionsElement_return retval = new HiveParser.alterStatementSuffixAddPartitionsElement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope partitionSpec296 =null;
		ParserRuleReturnScope partitionLocation297 =null;


		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1289:5: ( partitionSpec ( partitionLocation )? )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1289:7: partitionSpec ( partitionLocation )?
			{
			root_0 = (ASTNode)adaptor.nil();


			pushFollow(FOLLOW_partitionSpec_in_alterStatementSuffixAddPartitionsElement5510);
			partitionSpec296=partitionSpec();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) adaptor.addChild(root_0, partitionSpec296.getTree());

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1289:21: ( partitionLocation )?
			int alt84=2;
			int LA84_0 = input.LA(1);
			if ( (LA84_0==KW_LOCATION) ) {
				alt84=1;
			}
			switch (alt84) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1289:21: partitionLocation
					{
					pushFollow(FOLLOW_partitionLocation_in_alterStatementSuffixAddPartitionsElement5512);
					partitionLocation297=partitionLocation();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, partitionLocation297.getTree());

					}
					break;

			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// always runs before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixAddPartitionsElement"


	public static class alterStatementSuffixTouch_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixTouch"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1292:1: alterStatementSuffixTouch : KW_TOUCH ( partitionSpec )* -> ^( TOK_ALTERTABLE_TOUCH ( partitionSpec )* ) ;
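	// Illustrative suffix input (partition values are placeholders):
	//   ... TOUCH PARTITION (ds='2018-05-18');
	// The partitionSpec may repeat or be absent (touching the table itself);
	// rewrites to ^(TOK_ALTERTABLE_TOUCH partitionSpec*).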
	public final HiveParser.alterStatementSuffixTouch_return alterStatementSuffixTouch() throws RecognitionException {
		HiveParser.alterStatementSuffixTouch_return retval = new HiveParser.alterStatementSuffixTouch_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_TOUCH298=null;
		ParserRuleReturnScope partitionSpec299 =null;

		ASTNode KW_TOUCH298_tree=null;
		RewriteRuleTokenStream stream_KW_TOUCH=new RewriteRuleTokenStream(adaptor,"token KW_TOUCH");
		RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");

		 pushMsg("touch statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1295:5: ( KW_TOUCH ( partitionSpec )* -> ^( TOK_ALTERTABLE_TOUCH ( partitionSpec )* ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1295:7: KW_TOUCH ( partitionSpec )*
			{
			KW_TOUCH298=(Token)match(input,KW_TOUCH,FOLLOW_KW_TOUCH_in_alterStatementSuffixTouch5540); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TOUCH.add(KW_TOUCH298);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1295:16: ( partitionSpec )*
			loop85:
			while (true) {
				int alt85=2;
				int LA85_0 = input.LA(1);
				if ( (LA85_0==KW_PARTITION) ) {
					alt85=1;
				}

				switch (alt85) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1295:17: partitionSpec
					{
					pushFollow(FOLLOW_partitionSpec_in_alterStatementSuffixTouch5543);
					partitionSpec299=partitionSpec();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_partitionSpec.add(partitionSpec299.getTree());
					}
					break;

				default :
					break loop85;
				}
			}

			// AST REWRITE
			// elements: partitionSpec
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1296:5: -> ^( TOK_ALTERTABLE_TOUCH ( partitionSpec )* )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1296:8: ^( TOK_ALTERTABLE_TOUCH ( partitionSpec )* )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_TOUCH, "TOK_ALTERTABLE_TOUCH"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1296:31: ( partitionSpec )*
				while ( stream_partitionSpec.hasNext() ) {
					adaptor.addChild(root_1, stream_partitionSpec.nextTree());
				}
				stream_partitionSpec.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// always runs before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixTouch"


	public static class alterStatementSuffixArchive_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixArchive"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1299:1: alterStatementSuffixArchive : KW_ARCHIVE ( partitionSpec )* -> ^( TOK_ALTERTABLE_ARCHIVE ( partitionSpec )* ) ;
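	// Illustrative suffix input (partition values are placeholders):
	//   ... ARCHIVE PARTITION (ds='2018-05-18');
	// Rewrites to ^(TOK_ALTERTABLE_ARCHIVE partitionSpec*).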
	public final HiveParser.alterStatementSuffixArchive_return alterStatementSuffixArchive() throws RecognitionException {
		HiveParser.alterStatementSuffixArchive_return retval = new HiveParser.alterStatementSuffixArchive_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ARCHIVE300=null;
		ParserRuleReturnScope partitionSpec301 =null;

		ASTNode KW_ARCHIVE300_tree=null;
		RewriteRuleTokenStream stream_KW_ARCHIVE=new RewriteRuleTokenStream(adaptor,"token KW_ARCHIVE");
		RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");

		 pushMsg("archive statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1302:5: ( KW_ARCHIVE ( partitionSpec )* -> ^( TOK_ALTERTABLE_ARCHIVE ( partitionSpec )* ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1302:7: KW_ARCHIVE ( partitionSpec )*
			{
			KW_ARCHIVE300=(Token)match(input,KW_ARCHIVE,FOLLOW_KW_ARCHIVE_in_alterStatementSuffixArchive5587); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ARCHIVE.add(KW_ARCHIVE300);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1302:18: ( partitionSpec )*
			loop86:
			while (true) {
				int alt86=2;
				int LA86_0 = input.LA(1);
				if ( (LA86_0==KW_PARTITION) ) {
					alt86=1;
				}

				switch (alt86) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1302:19: partitionSpec
					{
					pushFollow(FOLLOW_partitionSpec_in_alterStatementSuffixArchive5590);
					partitionSpec301=partitionSpec();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_partitionSpec.add(partitionSpec301.getTree());
					}
					break;

				default :
					break loop86;
				}
			}

			// AST REWRITE
			// elements: partitionSpec
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1303:5: -> ^( TOK_ALTERTABLE_ARCHIVE ( partitionSpec )* )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1303:8: ^( TOK_ALTERTABLE_ARCHIVE ( partitionSpec )* )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_ARCHIVE, "TOK_ALTERTABLE_ARCHIVE"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1303:33: ( partitionSpec )*
				while ( stream_partitionSpec.hasNext() ) {
					adaptor.addChild(root_1, stream_partitionSpec.nextTree());
				}
				stream_partitionSpec.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// always runs before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixArchive"


	public static class alterStatementSuffixUnArchive_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixUnArchive"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1306:1: alterStatementSuffixUnArchive : KW_UNARCHIVE ( partitionSpec )* -> ^( TOK_ALTERTABLE_UNARCHIVE ( partitionSpec )* ) ;
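	// Counterpart of the ARCHIVE suffix above, e.g. (values are placeholders):
	//   ... UNARCHIVE PARTITION (ds='2018-05-18');
	// Rewrites to ^(TOK_ALTERTABLE_UNARCHIVE partitionSpec*).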
	public final HiveParser.alterStatementSuffixUnArchive_return alterStatementSuffixUnArchive() throws RecognitionException {
		HiveParser.alterStatementSuffixUnArchive_return retval = new HiveParser.alterStatementSuffixUnArchive_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_UNARCHIVE302=null;
		ParserRuleReturnScope partitionSpec303 =null;

		ASTNode KW_UNARCHIVE302_tree=null;
		RewriteRuleTokenStream stream_KW_UNARCHIVE=new RewriteRuleTokenStream(adaptor,"token KW_UNARCHIVE");
		RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");

		 pushMsg("unarchive statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1309:5: ( KW_UNARCHIVE ( partitionSpec )* -> ^( TOK_ALTERTABLE_UNARCHIVE ( partitionSpec )* ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1309:7: KW_UNARCHIVE ( partitionSpec )*
			{
			KW_UNARCHIVE302=(Token)match(input,KW_UNARCHIVE,FOLLOW_KW_UNARCHIVE_in_alterStatementSuffixUnArchive5634); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_UNARCHIVE.add(KW_UNARCHIVE302);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1309:20: ( partitionSpec )*
			loop87:
			while (true) {
				int alt87=2;
				int LA87_0 = input.LA(1);
				if ( (LA87_0==KW_PARTITION) ) {
					alt87=1;
				}

				switch (alt87) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1309:21: partitionSpec
					{
					pushFollow(FOLLOW_partitionSpec_in_alterStatementSuffixUnArchive5637);
					partitionSpec303=partitionSpec();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_partitionSpec.add(partitionSpec303.getTree());
					}
					break;

				default :
					break loop87;
				}
			}

			// AST REWRITE
			// elements: partitionSpec
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1310:5: -> ^( TOK_ALTERTABLE_UNARCHIVE ( partitionSpec )* )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1310:8: ^( TOK_ALTERTABLE_UNARCHIVE ( partitionSpec )* )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_UNARCHIVE, "TOK_ALTERTABLE_UNARCHIVE"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1310:35: ( partitionSpec )*
				while ( stream_partitionSpec.hasNext() ) {
					adaptor.addChild(root_1, stream_partitionSpec.nextTree());
				}
				stream_partitionSpec.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// always runs before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixUnArchive"


	public static class partitionLocation_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "partitionLocation"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1313:1: partitionLocation : KW_LOCATION locn= StringLiteral -> ^( TOK_PARTITIONLOCATION $locn) ;
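	// Illustrative input (the path is a placeholder):
	//   LOCATION '/warehouse/t/ds=2018-05-18'
	// Rewrites to ^(TOK_PARTITIONLOCATION $locn).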
	public final HiveParser.partitionLocation_return partitionLocation() throws RecognitionException {
		HiveParser.partitionLocation_return retval = new HiveParser.partitionLocation_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token locn=null;
		Token KW_LOCATION304=null;

		ASTNode locn_tree=null;
		ASTNode KW_LOCATION304_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_LOCATION=new RewriteRuleTokenStream(adaptor,"token KW_LOCATION");

		 pushMsg("partition location", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1316:5: ( KW_LOCATION locn= StringLiteral -> ^( TOK_PARTITIONLOCATION $locn) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1317:7: KW_LOCATION locn= StringLiteral
			{
			KW_LOCATION304=(Token)match(input,KW_LOCATION,FOLLOW_KW_LOCATION_in_partitionLocation5687); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_LOCATION.add(KW_LOCATION304);

			locn=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_partitionLocation5691); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(locn);

			// AST REWRITE
			// elements: locn
			// token labels: locn
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_locn=new RewriteRuleTokenStream(adaptor,"token locn",locn);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1317:38: -> ^( TOK_PARTITIONLOCATION $locn)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1317:41: ^( TOK_PARTITIONLOCATION $locn)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_PARTITIONLOCATION, "TOK_PARTITIONLOCATION"), root_1);
				adaptor.addChild(root_1, stream_locn.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// always runs before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "partitionLocation"


	public static class alterStatementSuffixDropPartitions_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixDropPartitions"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1320:1: alterStatementSuffixDropPartitions[boolean table] : KW_DROP ( ifExists )? dropPartitionSpec ( COMMA dropPartitionSpec )* ( KW_PURGE )? ( replicationClause )? -> { table }? ^( TOK_ALTERTABLE_DROPPARTS ( dropPartitionSpec )+ ( ifExists )? ( KW_PURGE )? ( replicationClause )? ) -> ^( TOK_ALTERVIEW_DROPPARTS ( dropPartitionSpec )+ ( ifExists )? ( replicationClause )? ) ;
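	// Illustrative suffix input (partition values are placeholders):
	//   ... DROP IF EXISTS PARTITION (ds='2018-05-18'), PARTITION (ds='2018-05-19') PURGE;
	// With table==true the rewrite is TOK_ALTERTABLE_DROPPARTS, which keeps the
	// optional KW_PURGE token; the ALTER VIEW form rewrites to
	// TOK_ALTERVIEW_DROPPARTS and omits PURGE.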
	public final HiveParser.alterStatementSuffixDropPartitions_return alterStatementSuffixDropPartitions(boolean table) throws RecognitionException {
		HiveParser.alterStatementSuffixDropPartitions_return retval = new HiveParser.alterStatementSuffixDropPartitions_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_DROP305=null;
		Token COMMA308=null;
		Token KW_PURGE310=null;
		ParserRuleReturnScope ifExists306 =null;
		ParserRuleReturnScope dropPartitionSpec307 =null;
		ParserRuleReturnScope dropPartitionSpec309 =null;
		ParserRuleReturnScope replicationClause311 =null;

		ASTNode KW_DROP305_tree=null;
		ASTNode COMMA308_tree=null;
		ASTNode KW_PURGE310_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
		RewriteRuleTokenStream stream_KW_PURGE=new RewriteRuleTokenStream(adaptor,"token KW_PURGE");
		RewriteRuleSubtreeStream stream_dropPartitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule dropPartitionSpec");
		RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");
		RewriteRuleSubtreeStream stream_replicationClause=new RewriteRuleSubtreeStream(adaptor,"rule replicationClause");

		 pushMsg("drop partition statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1323:5: ( KW_DROP ( ifExists )? dropPartitionSpec ( COMMA dropPartitionSpec )* ( KW_PURGE )? ( replicationClause )? -> { table }? ^( TOK_ALTERTABLE_DROPPARTS ( dropPartitionSpec )+ ( ifExists )? ( KW_PURGE )? ( replicationClause )? ) -> ^( TOK_ALTERVIEW_DROPPARTS ( dropPartitionSpec )+ ( ifExists )? ( replicationClause )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1323:7: KW_DROP ( ifExists )? dropPartitionSpec ( COMMA dropPartitionSpec )* ( KW_PURGE )? ( replicationClause )?
			{
			KW_DROP305=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_alterStatementSuffixDropPartitions5728); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DROP.add(KW_DROP305);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1323:15: ( ifExists )?
			int alt88=2;
			int LA88_0 = input.LA(1);
			if ( (LA88_0==KW_IF) ) {
				alt88=1;
			}
			switch (alt88) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1323:15: ifExists
					{
					pushFollow(FOLLOW_ifExists_in_alterStatementSuffixDropPartitions5730);
					ifExists306=ifExists();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_ifExists.add(ifExists306.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_dropPartitionSpec_in_alterStatementSuffixDropPartitions5733);
			dropPartitionSpec307=dropPartitionSpec();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_dropPartitionSpec.add(dropPartitionSpec307.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1323:43: ( COMMA dropPartitionSpec )*
			loop89:
			while (true) {
				int alt89=2;
				int LA89_0 = input.LA(1);
				if ( (LA89_0==COMMA) ) {
					alt89=1;
				}

				switch (alt89) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1323:44: COMMA dropPartitionSpec
					{
					COMMA308=(Token)match(input,COMMA,FOLLOW_COMMA_in_alterStatementSuffixDropPartitions5736); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA308);

					pushFollow(FOLLOW_dropPartitionSpec_in_alterStatementSuffixDropPartitions5738);
					dropPartitionSpec309=dropPartitionSpec();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_dropPartitionSpec.add(dropPartitionSpec309.getTree());
					}
					break;

				default :
					break loop89;
				}
			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1323:70: ( KW_PURGE )?
			int alt90=2;
			int LA90_0 = input.LA(1);
			if ( (LA90_0==KW_PURGE) ) {
				alt90=1;
			}
			switch (alt90) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1323:70: KW_PURGE
					{
					KW_PURGE310=(Token)match(input,KW_PURGE,FOLLOW_KW_PURGE_in_alterStatementSuffixDropPartitions5742); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_PURGE.add(KW_PURGE310);

					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1323:80: ( replicationClause )?
			int alt91=2;
			int LA91_0 = input.LA(1);
			if ( (LA91_0==KW_FOR) ) {
				alt91=1;
			}
			switch (alt91) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1323:80: replicationClause
					{
					pushFollow(FOLLOW_replicationClause_in_alterStatementSuffixDropPartitions5745);
					replicationClause311=replicationClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_replicationClause.add(replicationClause311.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: KW_PURGE, ifExists, dropPartitionSpec, ifExists, replicationClause, replicationClause, dropPartitionSpec
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1324:5: -> { table }? ^( TOK_ALTERTABLE_DROPPARTS ( dropPartitionSpec )+ ( ifExists )? ( KW_PURGE )? ( replicationClause )? )
			if ( table ) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1324:19: ^( TOK_ALTERTABLE_DROPPARTS ( dropPartitionSpec )+ ( ifExists )? ( KW_PURGE )? ( replicationClause )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_DROPPARTS, "TOK_ALTERTABLE_DROPPARTS"), root_1);
				if ( !(stream_dropPartitionSpec.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_dropPartitionSpec.hasNext() ) {
					adaptor.addChild(root_1, stream_dropPartitionSpec.nextTree());
				}
				stream_dropPartitionSpec.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1324:65: ( ifExists )?
				if ( stream_ifExists.hasNext() ) {
					adaptor.addChild(root_1, stream_ifExists.nextTree());
				}
				stream_ifExists.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1324:75: ( KW_PURGE )?
				if ( stream_KW_PURGE.hasNext() ) {
					adaptor.addChild(root_1, stream_KW_PURGE.nextNode());
				}
				stream_KW_PURGE.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1324:85: ( replicationClause )?
				if ( stream_replicationClause.hasNext() ) {
					adaptor.addChild(root_1, stream_replicationClause.nextTree());
				}
				stream_replicationClause.reset();

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 1325:5: -> ^( TOK_ALTERVIEW_DROPPARTS ( dropPartitionSpec )+ ( ifExists )? ( replicationClause )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1325:19: ^( TOK_ALTERVIEW_DROPPARTS ( dropPartitionSpec )+ ( ifExists )? ( replicationClause )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERVIEW_DROPPARTS, "TOK_ALTERVIEW_DROPPARTS"), root_1);
				if ( !(stream_dropPartitionSpec.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_dropPartitionSpec.hasNext() ) {
					adaptor.addChild(root_1, stream_dropPartitionSpec.nextTree());
				}
				stream_dropPartitionSpec.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1325:64: ( ifExists )?
				if ( stream_ifExists.hasNext() ) {
					adaptor.addChild(root_1, stream_ifExists.nextTree());
				}
				stream_ifExists.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1325:74: ( replicationClause )?
				if ( stream_replicationClause.hasNext() ) {
					adaptor.addChild(root_1, stream_replicationClause.nextTree());
				}
				stream_replicationClause.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// always runs before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixDropPartitions"


	public static class alterStatementSuffixProperties_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixProperties"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1328:1: alterStatementSuffixProperties : ( KW_SET KW_TBLPROPERTIES tableProperties -> ^( TOK_ALTERTABLE_PROPERTIES tableProperties ) | KW_UNSET KW_TBLPROPERTIES ( ifExists )? tableProperties -> ^( TOK_ALTERTABLE_DROPPROPERTIES tableProperties ( ifExists )? ) );
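	// Illustrative suffix inputs (property names/values are placeholders):
	//   ... SET TBLPROPERTIES ('comment'='new comment');
	//   ... UNSET TBLPROPERTIES IF EXISTS ('comment');
	// SET rewrites to ^(TOK_ALTERTABLE_PROPERTIES tableProperties); UNSET to
	// ^(TOK_ALTERTABLE_DROPPROPERTIES tableProperties ifExists?).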
	public final HiveParser.alterStatementSuffixProperties_return alterStatementSuffixProperties() throws RecognitionException {
		HiveParser.alterStatementSuffixProperties_return retval = new HiveParser.alterStatementSuffixProperties_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SET312=null;
		Token KW_TBLPROPERTIES313=null;
		Token KW_UNSET315=null;
		Token KW_TBLPROPERTIES316=null;
		ParserRuleReturnScope tableProperties314 =null;
		ParserRuleReturnScope ifExists317 =null;
		ParserRuleReturnScope tableProperties318 =null;

		ASTNode KW_SET312_tree=null;
		ASTNode KW_TBLPROPERTIES313_tree=null;
		ASTNode KW_UNSET315_tree=null;
		ASTNode KW_TBLPROPERTIES316_tree=null;
		RewriteRuleTokenStream stream_KW_UNSET=new RewriteRuleTokenStream(adaptor,"token KW_UNSET");
		RewriteRuleTokenStream stream_KW_TBLPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_TBLPROPERTIES");
		RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
		RewriteRuleSubtreeStream stream_tableProperties=new RewriteRuleSubtreeStream(adaptor,"rule tableProperties");
		RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");

		 pushMsg("alter properties statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1331:5: ( KW_SET KW_TBLPROPERTIES tableProperties -> ^( TOK_ALTERTABLE_PROPERTIES tableProperties ) | KW_UNSET KW_TBLPROPERTIES ( ifExists )? tableProperties -> ^( TOK_ALTERTABLE_DROPPROPERTIES tableProperties ( ifExists )? ) )
			int alt93=2;
			int LA93_0 = input.LA(1);
			if ( (LA93_0==KW_SET) ) {
				alt93=1;
			}
			else if ( (LA93_0==KW_UNSET) ) {
				alt93=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 93, 0, input);
				throw nvae;
			}

			switch (alt93) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1331:7: KW_SET KW_TBLPROPERTIES tableProperties
					{
					KW_SET312=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterStatementSuffixProperties5827); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET312);

					KW_TBLPROPERTIES313=(Token)match(input,KW_TBLPROPERTIES,FOLLOW_KW_TBLPROPERTIES_in_alterStatementSuffixProperties5829); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TBLPROPERTIES.add(KW_TBLPROPERTIES313);

					pushFollow(FOLLOW_tableProperties_in_alterStatementSuffixProperties5831);
					tableProperties314=tableProperties();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableProperties.add(tableProperties314.getTree());
					// AST REWRITE
					// elements: tableProperties
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1332:5: -> ^( TOK_ALTERTABLE_PROPERTIES tableProperties )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1332:8: ^( TOK_ALTERTABLE_PROPERTIES tableProperties )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_PROPERTIES, "TOK_ALTERTABLE_PROPERTIES"), root_1);
						adaptor.addChild(root_1, stream_tableProperties.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1333:7: KW_UNSET KW_TBLPROPERTIES ( ifExists )? tableProperties
					{
					KW_UNSET315=(Token)match(input,KW_UNSET,FOLLOW_KW_UNSET_in_alterStatementSuffixProperties5851); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_UNSET.add(KW_UNSET315);

					KW_TBLPROPERTIES316=(Token)match(input,KW_TBLPROPERTIES,FOLLOW_KW_TBLPROPERTIES_in_alterStatementSuffixProperties5853); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TBLPROPERTIES.add(KW_TBLPROPERTIES316);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1333:33: ( ifExists )?
					int alt92=2;
					int LA92_0 = input.LA(1);
					if ( (LA92_0==KW_IF) ) {
						alt92=1;
					}
					switch (alt92) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1333:33: ifExists
							{
							pushFollow(FOLLOW_ifExists_in_alterStatementSuffixProperties5855);
							ifExists317=ifExists();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_ifExists.add(ifExists317.getTree());
							}
							break;

					}

					pushFollow(FOLLOW_tableProperties_in_alterStatementSuffixProperties5858);
					tableProperties318=tableProperties();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableProperties.add(tableProperties318.getTree());
					// AST REWRITE
					// elements: tableProperties, ifExists
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1334:5: -> ^( TOK_ALTERTABLE_DROPPROPERTIES tableProperties ( ifExists )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1334:8: ^( TOK_ALTERTABLE_DROPPROPERTIES tableProperties ( ifExists )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_DROPPROPERTIES, "TOK_ALTERTABLE_DROPPROPERTIES"), root_1);
						adaptor.addChild(root_1, stream_tableProperties.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1334:56: ( ifExists )?
						if ( stream_ifExists.hasNext() ) {
							adaptor.addChild(root_1, stream_ifExists.nextTree());
						}
						stream_ifExists.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// always runs before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixProperties"


	public static class alterViewSuffixProperties_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterViewSuffixProperties"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1337:1: alterViewSuffixProperties : ( KW_SET KW_TBLPROPERTIES tableProperties -> ^( TOK_ALTERVIEW_PROPERTIES tableProperties ) | KW_UNSET KW_TBLPROPERTIES ( ifExists )? tableProperties -> ^( TOK_ALTERVIEW_DROPPROPERTIES tableProperties ( ifExists )? ) );
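	// View counterpart of alterStatementSuffixProperties; the same illustrative
	// inputs apply, rewriting to TOK_ALTERVIEW_PROPERTIES (SET) and
	// ^(TOK_ALTERVIEW_DROPPROPERTIES tableProperties ifExists?) (UNSET).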
	public final HiveParser.alterViewSuffixProperties_return alterViewSuffixProperties() throws RecognitionException {
		HiveParser.alterViewSuffixProperties_return retval = new HiveParser.alterViewSuffixProperties_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SET319=null;
		Token KW_TBLPROPERTIES320=null;
		Token KW_UNSET322=null;
		Token KW_TBLPROPERTIES323=null;
		ParserRuleReturnScope tableProperties321 =null;
		ParserRuleReturnScope ifExists324 =null;
		ParserRuleReturnScope tableProperties325 =null;

		ASTNode KW_SET319_tree=null;
		ASTNode KW_TBLPROPERTIES320_tree=null;
		ASTNode KW_UNSET322_tree=null;
		ASTNode KW_TBLPROPERTIES323_tree=null;
		RewriteRuleTokenStream stream_KW_UNSET=new RewriteRuleTokenStream(adaptor,"token KW_UNSET");
		RewriteRuleTokenStream stream_KW_TBLPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_TBLPROPERTIES");
		RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
		RewriteRuleSubtreeStream stream_tableProperties=new RewriteRuleSubtreeStream(adaptor,"rule tableProperties");
		RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");

		 pushMsg("alter view properties statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1340:5: ( KW_SET KW_TBLPROPERTIES tableProperties -> ^( TOK_ALTERVIEW_PROPERTIES tableProperties ) | KW_UNSET KW_TBLPROPERTIES ( ifExists )? tableProperties -> ^( TOK_ALTERVIEW_DROPPROPERTIES tableProperties ( ifExists )? ) )
			int alt95=2;
			int LA95_0 = input.LA(1);
			if ( (LA95_0==KW_SET) ) {
				alt95=1;
			}
			else if ( (LA95_0==KW_UNSET) ) {
				alt95=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 95, 0, input);
				throw nvae;
			}

			switch (alt95) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1340:7: KW_SET KW_TBLPROPERTIES tableProperties
					{
					KW_SET319=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterViewSuffixProperties5900); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET319);

					KW_TBLPROPERTIES320=(Token)match(input,KW_TBLPROPERTIES,FOLLOW_KW_TBLPROPERTIES_in_alterViewSuffixProperties5902); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TBLPROPERTIES.add(KW_TBLPROPERTIES320);

					pushFollow(FOLLOW_tableProperties_in_alterViewSuffixProperties5904);
					tableProperties321=tableProperties();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableProperties.add(tableProperties321.getTree());
					// AST REWRITE
					// elements: tableProperties
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1341:5: -> ^( TOK_ALTERVIEW_PROPERTIES tableProperties )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1341:8: ^( TOK_ALTERVIEW_PROPERTIES tableProperties )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERVIEW_PROPERTIES, "TOK_ALTERVIEW_PROPERTIES"), root_1);
						adaptor.addChild(root_1, stream_tableProperties.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1342:7: KW_UNSET KW_TBLPROPERTIES ( ifExists )? tableProperties
					{
					KW_UNSET322=(Token)match(input,KW_UNSET,FOLLOW_KW_UNSET_in_alterViewSuffixProperties5924); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_UNSET.add(KW_UNSET322);

					KW_TBLPROPERTIES323=(Token)match(input,KW_TBLPROPERTIES,FOLLOW_KW_TBLPROPERTIES_in_alterViewSuffixProperties5926); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TBLPROPERTIES.add(KW_TBLPROPERTIES323);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1342:33: ( ifExists )?
					int alt94=2;
					int LA94_0 = input.LA(1);
					if ( (LA94_0==KW_IF) ) {
						alt94=1;
					}
					switch (alt94) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1342:33: ifExists
							{
							pushFollow(FOLLOW_ifExists_in_alterViewSuffixProperties5928);
							ifExists324=ifExists();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_ifExists.add(ifExists324.getTree());
							}
							break;

					}

					pushFollow(FOLLOW_tableProperties_in_alterViewSuffixProperties5931);
					tableProperties325=tableProperties();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableProperties.add(tableProperties325.getTree());
					// AST REWRITE
					// elements: ifExists, tableProperties
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1343:5: -> ^( TOK_ALTERVIEW_DROPPROPERTIES tableProperties ( ifExists )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1343:8: ^( TOK_ALTERVIEW_DROPPROPERTIES tableProperties ( ifExists )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERVIEW_DROPPROPERTIES, "TOK_ALTERVIEW_DROPPROPERTIES"), root_1);
						adaptor.addChild(root_1, stream_tableProperties.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1343:55: ( ifExists )?
						if ( stream_ifExists.hasNext() ) {
							adaptor.addChild(root_1, stream_ifExists.nextTree());
						}
						stream_ifExists.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterViewSuffixProperties"


	public static class alterMaterializedViewSuffixRewrite_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterMaterializedViewSuffixRewrite"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1346:1: alterMaterializedViewSuffixRewrite : (mvRewriteFlag= rewriteEnabled |mvRewriteFlag= rewriteDisabled ) -> ^( TOK_ALTER_MATERIALIZED_VIEW_REWRITE $mvRewriteFlag) ;
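	// Illustrative usage, assuming standard Hive DDL syntax (placeholder view name;
	// examples only, not generated code):
	//   ALTER MATERIALIZED VIEW mv ENABLE REWRITE;
	//   ALTER MATERIALIZED VIEW mv DISABLE REWRITE;
	// This rule matches only the ENABLE/DISABLE REWRITE suffix; either flag becomes
	// the single child of TOK_ALTER_MATERIALIZED_VIEW_REWRITE.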
	public final HiveParser.alterMaterializedViewSuffixRewrite_return alterMaterializedViewSuffixRewrite() throws RecognitionException {
		HiveParser.alterMaterializedViewSuffixRewrite_return retval = new HiveParser.alterMaterializedViewSuffixRewrite_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope mvRewriteFlag =null;

		RewriteRuleSubtreeStream stream_rewriteEnabled=new RewriteRuleSubtreeStream(adaptor,"rule rewriteEnabled");
		RewriteRuleSubtreeStream stream_rewriteDisabled=new RewriteRuleSubtreeStream(adaptor,"rule rewriteDisabled");

		 pushMsg("alter materialized view rewrite statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1349:5: ( (mvRewriteFlag= rewriteEnabled |mvRewriteFlag= rewriteDisabled ) -> ^( TOK_ALTER_MATERIALIZED_VIEW_REWRITE $mvRewriteFlag) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1349:7: (mvRewriteFlag= rewriteEnabled |mvRewriteFlag= rewriteDisabled )
			{
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1349:7: (mvRewriteFlag= rewriteEnabled |mvRewriteFlag= rewriteDisabled )
			int alt96=2;
			int LA96_0 = input.LA(1);
			if ( (LA96_0==KW_ENABLE) ) {
				alt96=1;
			}
			else if ( (LA96_0==KW_DISABLE) ) {
				alt96=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 96, 0, input);
				throw nvae;
			}

			switch (alt96) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1349:8: mvRewriteFlag= rewriteEnabled
					{
					pushFollow(FOLLOW_rewriteEnabled_in_alterMaterializedViewSuffixRewrite5976);
					mvRewriteFlag=rewriteEnabled();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_rewriteEnabled.add(mvRewriteFlag.getTree());
					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1349:39: mvRewriteFlag= rewriteDisabled
					{
					pushFollow(FOLLOW_rewriteDisabled_in_alterMaterializedViewSuffixRewrite5982);
					mvRewriteFlag=rewriteDisabled();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_rewriteDisabled.add(mvRewriteFlag.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: mvRewriteFlag
			// token labels: 
			// rule labels: mvRewriteFlag, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_mvRewriteFlag=new RewriteRuleSubtreeStream(adaptor,"rule mvRewriteFlag",mvRewriteFlag!=null?mvRewriteFlag.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1350:5: -> ^( TOK_ALTER_MATERIALIZED_VIEW_REWRITE $mvRewriteFlag)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1350:8: ^( TOK_ALTER_MATERIALIZED_VIEW_REWRITE $mvRewriteFlag)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTER_MATERIALIZED_VIEW_REWRITE, "TOK_ALTER_MATERIALIZED_VIEW_REWRITE"), root_1);
				adaptor.addChild(root_1, stream_mvRewriteFlag.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterMaterializedViewSuffixRewrite"


	public static class alterMaterializedViewSuffixRebuild_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterMaterializedViewSuffixRebuild"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1353:1: alterMaterializedViewSuffixRebuild : KW_REBUILD -> ^( TOK_ALTER_MATERIALIZED_VIEW_REBUILD ) ;
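	// Illustrative usage, assuming standard Hive DDL syntax (placeholder view name):
	//   ALTER MATERIALIZED VIEW mv REBUILD;
	// The REBUILD suffix rewrites to a childless TOK_ALTER_MATERIALIZED_VIEW_REBUILD node.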
	public final HiveParser.alterMaterializedViewSuffixRebuild_return alterMaterializedViewSuffixRebuild() throws RecognitionException {
		HiveParser.alterMaterializedViewSuffixRebuild_return retval = new HiveParser.alterMaterializedViewSuffixRebuild_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_REBUILD326=null;

		ASTNode KW_REBUILD326_tree=null;
		RewriteRuleTokenStream stream_KW_REBUILD=new RewriteRuleTokenStream(adaptor,"token KW_REBUILD");

		 pushMsg("alter materialized view rebuild statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1356:5: ( KW_REBUILD -> ^( TOK_ALTER_MATERIALIZED_VIEW_REBUILD ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1356:7: KW_REBUILD
			{
			KW_REBUILD326=(Token)match(input,KW_REBUILD,FOLLOW_KW_REBUILD_in_alterMaterializedViewSuffixRebuild6023); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_REBUILD.add(KW_REBUILD326);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1356:18: -> ^( TOK_ALTER_MATERIALIZED_VIEW_REBUILD )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1356:21: ^( TOK_ALTER_MATERIALIZED_VIEW_REBUILD )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTER_MATERIALIZED_VIEW_REBUILD, "TOK_ALTER_MATERIALIZED_VIEW_REBUILD"), root_1);
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterMaterializedViewSuffixRebuild"


	public static class alterStatementSuffixSerdeProperties_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixSerdeProperties"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1359:1: alterStatementSuffixSerdeProperties : ( KW_SET KW_SERDE serdeName= StringLiteral ( KW_WITH KW_SERDEPROPERTIES tableProperties )? -> ^( TOK_ALTERTABLE_SERIALIZER $serdeName ( tableProperties )? ) | KW_SET KW_SERDEPROPERTIES tableProperties -> ^( TOK_ALTERTABLE_SERDEPROPERTIES tableProperties ) );
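	// Illustrative usage, assuming standard Hive DDL syntax ('org.example.MySerDe'
	// is a placeholder class name, not a real SerDe; examples only, not generated code):
	//   ALTER TABLE t SET SERDE 'org.example.MySerDe' WITH SERDEPROPERTIES ('field.delim'='\t');
	//   ALTER TABLE t SET SERDEPROPERTIES ('field.delim'=',');
	// The first form rewrites to TOK_ALTERTABLE_SERIALIZER with an optional
	// tableProperties child, the second to TOK_ALTERTABLE_SERDEPROPERTIES.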
	public final HiveParser.alterStatementSuffixSerdeProperties_return alterStatementSuffixSerdeProperties() throws RecognitionException {
		HiveParser.alterStatementSuffixSerdeProperties_return retval = new HiveParser.alterStatementSuffixSerdeProperties_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token serdeName=null;
		Token KW_SET327=null;
		Token KW_SERDE328=null;
		Token KW_WITH329=null;
		Token KW_SERDEPROPERTIES330=null;
		Token KW_SET332=null;
		Token KW_SERDEPROPERTIES333=null;
		ParserRuleReturnScope tableProperties331 =null;
		ParserRuleReturnScope tableProperties334 =null;

		ASTNode serdeName_tree=null;
		ASTNode KW_SET327_tree=null;
		ASTNode KW_SERDE328_tree=null;
		ASTNode KW_WITH329_tree=null;
		ASTNode KW_SERDEPROPERTIES330_tree=null;
		ASTNode KW_SET332_tree=null;
		ASTNode KW_SERDEPROPERTIES333_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
		RewriteRuleTokenStream stream_KW_SERDEPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_SERDEPROPERTIES");
		RewriteRuleTokenStream stream_KW_SERDE=new RewriteRuleTokenStream(adaptor,"token KW_SERDE");
		RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
		RewriteRuleSubtreeStream stream_tableProperties=new RewriteRuleSubtreeStream(adaptor,"rule tableProperties");

		 pushMsg("alter serdes statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1362:5: ( KW_SET KW_SERDE serdeName= StringLiteral ( KW_WITH KW_SERDEPROPERTIES tableProperties )? -> ^( TOK_ALTERTABLE_SERIALIZER $serdeName ( tableProperties )? ) | KW_SET KW_SERDEPROPERTIES tableProperties -> ^( TOK_ALTERTABLE_SERDEPROPERTIES tableProperties ) )
			int alt98=2;
			int LA98_0 = input.LA(1);
			if ( (LA98_0==KW_SET) ) {
				int LA98_1 = input.LA(2);
				if ( (LA98_1==KW_SERDE) ) {
					alt98=1;
				}
				else if ( (LA98_1==KW_SERDEPROPERTIES) ) {
					alt98=2;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 98, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 98, 0, input);
				throw nvae;
			}

			switch (alt98) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1362:7: KW_SET KW_SERDE serdeName= StringLiteral ( KW_WITH KW_SERDEPROPERTIES tableProperties )?
					{
					KW_SET327=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterStatementSuffixSerdeProperties6056); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET327);

					KW_SERDE328=(Token)match(input,KW_SERDE,FOLLOW_KW_SERDE_in_alterStatementSuffixSerdeProperties6058); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SERDE.add(KW_SERDE328);

					serdeName=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_alterStatementSuffixSerdeProperties6062); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(serdeName);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1362:47: ( KW_WITH KW_SERDEPROPERTIES tableProperties )?
					int alt97=2;
					int LA97_0 = input.LA(1);
					if ( (LA97_0==KW_WITH) ) {
						alt97=1;
					}
					switch (alt97) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1362:48: KW_WITH KW_SERDEPROPERTIES tableProperties
							{
							KW_WITH329=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_alterStatementSuffixSerdeProperties6065); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_WITH.add(KW_WITH329);

							KW_SERDEPROPERTIES330=(Token)match(input,KW_SERDEPROPERTIES,FOLLOW_KW_SERDEPROPERTIES_in_alterStatementSuffixSerdeProperties6067); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_SERDEPROPERTIES.add(KW_SERDEPROPERTIES330);

							pushFollow(FOLLOW_tableProperties_in_alterStatementSuffixSerdeProperties6069);
							tableProperties331=tableProperties();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tableProperties.add(tableProperties331.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: tableProperties, serdeName
					// token labels: serdeName
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleTokenStream stream_serdeName=new RewriteRuleTokenStream(adaptor,"token serdeName",serdeName);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1363:5: -> ^( TOK_ALTERTABLE_SERIALIZER $serdeName ( tableProperties )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1363:8: ^( TOK_ALTERTABLE_SERIALIZER $serdeName ( tableProperties )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_SERIALIZER, "TOK_ALTERTABLE_SERIALIZER"), root_1);
						adaptor.addChild(root_1, stream_serdeName.nextNode());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1363:47: ( tableProperties )?
						if ( stream_tableProperties.hasNext() ) {
							adaptor.addChild(root_1, stream_tableProperties.nextTree());
						}
						stream_tableProperties.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1364:7: KW_SET KW_SERDEPROPERTIES tableProperties
					{
					KW_SET332=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterStatementSuffixSerdeProperties6095); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET332);

					KW_SERDEPROPERTIES333=(Token)match(input,KW_SERDEPROPERTIES,FOLLOW_KW_SERDEPROPERTIES_in_alterStatementSuffixSerdeProperties6097); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SERDEPROPERTIES.add(KW_SERDEPROPERTIES333);

					pushFollow(FOLLOW_tableProperties_in_alterStatementSuffixSerdeProperties6099);
					tableProperties334=tableProperties();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableProperties.add(tableProperties334.getTree());
					// AST REWRITE
					// elements: tableProperties
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1365:5: -> ^( TOK_ALTERTABLE_SERDEPROPERTIES tableProperties )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1365:8: ^( TOK_ALTERTABLE_SERDEPROPERTIES tableProperties )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_SERDEPROPERTIES, "TOK_ALTERTABLE_SERDEPROPERTIES"), root_1);
						adaptor.addChild(root_1, stream_tableProperties.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixSerdeProperties"


	public static class tablePartitionPrefix_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tablePartitionPrefix"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1368:1: tablePartitionPrefix : tableName ( partitionSpec )? -> ^( TOK_TABLE_PARTITION tableName ( partitionSpec )? ) ;
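	// Illustrative usage, assuming standard Hive DDL syntax (placeholder names): this
	// rule matches the "t PARTITION (dt='2018-05-18')" portion of statements such as
	//   ALTER TABLE t PARTITION (dt='2018-05-18') SET FILEFORMAT ORC;
	// and emits TOK_TABLE_PARTITION, omitting the partitionSpec child when no
	// PARTITION clause is present.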
	public final HiveParser.tablePartitionPrefix_return tablePartitionPrefix() throws RecognitionException {
		HiveParser.tablePartitionPrefix_return retval = new HiveParser.tablePartitionPrefix_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope tableName335 =null;
		ParserRuleReturnScope partitionSpec336 =null;

		RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		pushMsg("table partition prefix", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1371:3: ( tableName ( partitionSpec )? -> ^( TOK_TABLE_PARTITION tableName ( partitionSpec )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1371:5: tableName ( partitionSpec )?
			{
			pushFollow(FOLLOW_tableName_in_tablePartitionPrefix6136);
			tableName335=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(tableName335.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1371:15: ( partitionSpec )?
			int alt99=2;
			int LA99_0 = input.LA(1);
			if ( (LA99_0==KW_PARTITION) ) {
				alt99=1;
			}
			switch (alt99) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1371:15: partitionSpec
					{
					pushFollow(FOLLOW_partitionSpec_in_tablePartitionPrefix6138);
					partitionSpec336=partitionSpec();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_partitionSpec.add(partitionSpec336.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: tableName, partitionSpec
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1372:3: -> ^( TOK_TABLE_PARTITION tableName ( partitionSpec )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1372:5: ^( TOK_TABLE_PARTITION tableName ( partitionSpec )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLE_PARTITION, "TOK_TABLE_PARTITION"), root_1);
				adaptor.addChild(root_1, stream_tableName.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1372:37: ( partitionSpec )?
				if ( stream_partitionSpec.hasNext() ) {
					adaptor.addChild(root_1, stream_partitionSpec.nextTree());
				}
				stream_partitionSpec.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "tablePartitionPrefix"


	public static class alterStatementSuffixFileFormat_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixFileFormat"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1375:1: alterStatementSuffixFileFormat : KW_SET KW_FILEFORMAT fileFormat -> ^( TOK_ALTERTABLE_FILEFORMAT fileFormat ) ;
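	// Illustrative usage, assuming standard Hive DDL syntax (placeholder table name):
	//   ALTER TABLE t SET FILEFORMAT ORC;
	// The fileFormat subtree becomes the single child of TOK_ALTERTABLE_FILEFORMAT.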
	public final HiveParser.alterStatementSuffixFileFormat_return alterStatementSuffixFileFormat() throws RecognitionException {
		HiveParser.alterStatementSuffixFileFormat_return retval = new HiveParser.alterStatementSuffixFileFormat_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SET337=null;
		Token KW_FILEFORMAT338=null;
		ParserRuleReturnScope fileFormat339 =null;

		ASTNode KW_SET337_tree=null;
		ASTNode KW_FILEFORMAT338_tree=null;
		RewriteRuleTokenStream stream_KW_FILEFORMAT=new RewriteRuleTokenStream(adaptor,"token KW_FILEFORMAT");
		RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
		RewriteRuleSubtreeStream stream_fileFormat=new RewriteRuleSubtreeStream(adaptor,"rule fileFormat");

		pushMsg("alter fileformat statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1378:2: ( KW_SET KW_FILEFORMAT fileFormat -> ^( TOK_ALTERTABLE_FILEFORMAT fileFormat ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1378:4: KW_SET KW_FILEFORMAT fileFormat
			{
			KW_SET337=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterStatementSuffixFileFormat6173); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET337);

			KW_FILEFORMAT338=(Token)match(input,KW_FILEFORMAT,FOLLOW_KW_FILEFORMAT_in_alterStatementSuffixFileFormat6175); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FILEFORMAT.add(KW_FILEFORMAT338);

			pushFollow(FOLLOW_fileFormat_in_alterStatementSuffixFileFormat6177);
			fileFormat339=fileFormat();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_fileFormat.add(fileFormat339.getTree());
			// AST REWRITE
			// elements: fileFormat
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1379:2: -> ^( TOK_ALTERTABLE_FILEFORMAT fileFormat )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1379:5: ^( TOK_ALTERTABLE_FILEFORMAT fileFormat )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_FILEFORMAT, "TOK_ALTERTABLE_FILEFORMAT"), root_1);
				adaptor.addChild(root_1, stream_fileFormat.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixFileFormat"


	public static class alterStatementSuffixClusterbySortby_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixClusterbySortby"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1382:1: alterStatementSuffixClusterbySortby : ( KW_NOT KW_CLUSTERED -> ^( TOK_ALTERTABLE_CLUSTER_SORT TOK_NOT_CLUSTERED ) | KW_NOT KW_SORTED -> ^( TOK_ALTERTABLE_CLUSTER_SORT TOK_NOT_SORTED ) | tableBuckets -> ^( TOK_ALTERTABLE_CLUSTER_SORT tableBuckets ) );
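	// Illustrative usage, assuming standard Hive DDL syntax (placeholder names;
	// examples only, not generated code):
	//   ALTER TABLE t NOT CLUSTERED;
	//   ALTER TABLE t NOT SORTED;
	//   ALTER TABLE t CLUSTERED BY (c) SORTED BY (c) INTO 8 BUCKETS;
	// All three alternatives root the result at TOK_ALTERTABLE_CLUSTER_SORT.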
	public final HiveParser.alterStatementSuffixClusterbySortby_return alterStatementSuffixClusterbySortby() throws RecognitionException {
		HiveParser.alterStatementSuffixClusterbySortby_return retval = new HiveParser.alterStatementSuffixClusterbySortby_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_NOT340=null;
		Token KW_CLUSTERED341=null;
		Token KW_NOT342=null;
		Token KW_SORTED343=null;
		ParserRuleReturnScope tableBuckets344 =null;

		ASTNode KW_NOT340_tree=null;
		ASTNode KW_CLUSTERED341_tree=null;
		ASTNode KW_NOT342_tree=null;
		ASTNode KW_SORTED343_tree=null;
		RewriteRuleTokenStream stream_KW_NOT=new RewriteRuleTokenStream(adaptor,"token KW_NOT");
		RewriteRuleTokenStream stream_KW_SORTED=new RewriteRuleTokenStream(adaptor,"token KW_SORTED");
		RewriteRuleTokenStream stream_KW_CLUSTERED=new RewriteRuleTokenStream(adaptor,"token KW_CLUSTERED");
		RewriteRuleSubtreeStream stream_tableBuckets=new RewriteRuleSubtreeStream(adaptor,"rule tableBuckets");

		pushMsg("alter partition cluster by sort by statement", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1385:3: ( KW_NOT KW_CLUSTERED -> ^( TOK_ALTERTABLE_CLUSTER_SORT TOK_NOT_CLUSTERED ) | KW_NOT KW_SORTED -> ^( TOK_ALTERTABLE_CLUSTER_SORT TOK_NOT_SORTED ) | tableBuckets -> ^( TOK_ALTERTABLE_CLUSTER_SORT tableBuckets ) )
			int alt100=3;
			int LA100_0 = input.LA(1);
			if ( (LA100_0==KW_NOT) ) {
				int LA100_1 = input.LA(2);
				if ( (LA100_1==KW_CLUSTERED) ) {
					alt100=1;
				}
				else if ( (LA100_1==KW_SORTED) ) {
					alt100=2;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 100, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

			}
			else if ( (LA100_0==KW_CLUSTERED) ) {
				alt100=3;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 100, 0, input);
				throw nvae;
			}

			switch (alt100) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1385:5: KW_NOT KW_CLUSTERED
					{
					KW_NOT340=(Token)match(input,KW_NOT,FOLLOW_KW_NOT_in_alterStatementSuffixClusterbySortby6208); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_NOT.add(KW_NOT340);

					KW_CLUSTERED341=(Token)match(input,KW_CLUSTERED,FOLLOW_KW_CLUSTERED_in_alterStatementSuffixClusterbySortby6210); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_CLUSTERED.add(KW_CLUSTERED341);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1385:25: -> ^( TOK_ALTERTABLE_CLUSTER_SORT TOK_NOT_CLUSTERED )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1385:28: ^( TOK_ALTERTABLE_CLUSTER_SORT TOK_NOT_CLUSTERED )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_CLUSTER_SORT, "TOK_ALTERTABLE_CLUSTER_SORT"), root_1);
						adaptor.addChild(root_1, (ASTNode)adaptor.create(TOK_NOT_CLUSTERED, "TOK_NOT_CLUSTERED"));
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1386:5: KW_NOT KW_SORTED
					{
					KW_NOT342=(Token)match(input,KW_NOT,FOLLOW_KW_NOT_in_alterStatementSuffixClusterbySortby6224); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_NOT.add(KW_NOT342);

					KW_SORTED343=(Token)match(input,KW_SORTED,FOLLOW_KW_SORTED_in_alterStatementSuffixClusterbySortby6226); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SORTED.add(KW_SORTED343);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1386:22: -> ^( TOK_ALTERTABLE_CLUSTER_SORT TOK_NOT_SORTED )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1386:25: ^( TOK_ALTERTABLE_CLUSTER_SORT TOK_NOT_SORTED )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_CLUSTER_SORT, "TOK_ALTERTABLE_CLUSTER_SORT"), root_1);
						adaptor.addChild(root_1, (ASTNode)adaptor.create(TOK_NOT_SORTED, "TOK_NOT_SORTED"));
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1387:5: tableBuckets
					{
					pushFollow(FOLLOW_tableBuckets_in_alterStatementSuffixClusterbySortby6240);
					tableBuckets344=tableBuckets();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableBuckets.add(tableBuckets344.getTree());
					// AST REWRITE
					// elements: tableBuckets
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1387:18: -> ^( TOK_ALTERTABLE_CLUSTER_SORT tableBuckets )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1387:21: ^( TOK_ALTERTABLE_CLUSTER_SORT tableBuckets )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_CLUSTER_SORT, "TOK_ALTERTABLE_CLUSTER_SORT"), root_1);
						adaptor.addChild(root_1, stream_tableBuckets.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixClusterbySortby"


	public static class alterTblPartitionStatementSuffixSkewedLocation_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterTblPartitionStatementSuffixSkewedLocation"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1390:1: alterTblPartitionStatementSuffixSkewedLocation : KW_SET KW_SKEWED KW_LOCATION skewedLocations -> ^( TOK_ALTERTABLE_SKEWED_LOCATION skewedLocations ) ;
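	// Illustrative usage, assuming standard Hive DDL syntax (placeholder names and path):
	//   ALTER TABLE t SET SKEWED LOCATION (1='/warehouse/skew1');
	// The skewedLocations subtree becomes the child of TOK_ALTERTABLE_SKEWED_LOCATION.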
	public final HiveParser.alterTblPartitionStatementSuffixSkewedLocation_return alterTblPartitionStatementSuffixSkewedLocation() throws RecognitionException {
		HiveParser.alterTblPartitionStatementSuffixSkewedLocation_return retval = new HiveParser.alterTblPartitionStatementSuffixSkewedLocation_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SET345=null;
		Token KW_SKEWED346=null;
		Token KW_LOCATION347=null;
		ParserRuleReturnScope skewedLocations348 =null;

		ASTNode KW_SET345_tree=null;
		ASTNode KW_SKEWED346_tree=null;
		ASTNode KW_LOCATION347_tree=null;
		RewriteRuleTokenStream stream_KW_LOCATION=new RewriteRuleTokenStream(adaptor,"token KW_LOCATION");
		RewriteRuleTokenStream stream_KW_SKEWED=new RewriteRuleTokenStream(adaptor,"token KW_SKEWED");
		RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
		RewriteRuleSubtreeStream stream_skewedLocations=new RewriteRuleSubtreeStream(adaptor,"rule skewedLocations");

		pushMsg("alter partition skewed location", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1393:3: ( KW_SET KW_SKEWED KW_LOCATION skewedLocations -> ^( TOK_ALTERTABLE_SKEWED_LOCATION skewedLocations ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1393:5: KW_SET KW_SKEWED KW_LOCATION skewedLocations
			{
			KW_SET345=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterTblPartitionStatementSuffixSkewedLocation6271); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET345);

			KW_SKEWED346=(Token)match(input,KW_SKEWED,FOLLOW_KW_SKEWED_in_alterTblPartitionStatementSuffixSkewedLocation6273); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SKEWED.add(KW_SKEWED346);

			KW_LOCATION347=(Token)match(input,KW_LOCATION,FOLLOW_KW_LOCATION_in_alterTblPartitionStatementSuffixSkewedLocation6275); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_LOCATION.add(KW_LOCATION347);

			pushFollow(FOLLOW_skewedLocations_in_alterTblPartitionStatementSuffixSkewedLocation6277);
			skewedLocations348=skewedLocations();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_skewedLocations.add(skewedLocations348.getTree());
			// AST REWRITE
			// elements: skewedLocations
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1394:3: -> ^( TOK_ALTERTABLE_SKEWED_LOCATION skewedLocations )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1394:6: ^( TOK_ALTERTABLE_SKEWED_LOCATION skewedLocations )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_SKEWED_LOCATION, "TOK_ALTERTABLE_SKEWED_LOCATION"), root_1);
				adaptor.addChild(root_1, stream_skewedLocations.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterTblPartitionStatementSuffixSkewedLocation"


	public static class skewedLocations_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "skewedLocations"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1397:1: skewedLocations : LPAREN skewedLocationsList RPAREN -> ^( TOK_SKEWED_LOCATIONS skewedLocationsList ) ;
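	// Illustrative input, assuming standard Hive DDL syntax (placeholder paths): the
	// parenthesized list in SET SKEWED LOCATION (1='/skew/one', 2='/skew/two') is
	// parsed here and wrapped in TOK_SKEWED_LOCATIONS.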
	public final HiveParser.skewedLocations_return skewedLocations() throws RecognitionException {
		HiveParser.skewedLocations_return retval = new HiveParser.skewedLocations_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token LPAREN349=null;
		Token RPAREN351=null;
		ParserRuleReturnScope skewedLocationsList350 =null;

		ASTNode LPAREN349_tree=null;
		ASTNode RPAREN351_tree=null;
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleSubtreeStream stream_skewedLocationsList=new RewriteRuleSubtreeStream(adaptor,"rule skewedLocationsList");

		 pushMsg("skewed locations", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1400:5: ( LPAREN skewedLocationsList RPAREN -> ^( TOK_SKEWED_LOCATIONS skewedLocationsList ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1401:7: LPAREN skewedLocationsList RPAREN
			{
			LPAREN349=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_skewedLocations6320); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN349);

			pushFollow(FOLLOW_skewedLocationsList_in_skewedLocations6322);
			skewedLocationsList350=skewedLocationsList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_skewedLocationsList.add(skewedLocationsList350.getTree());
			RPAREN351=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_skewedLocations6324); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN351);

			// AST REWRITE
			// elements: skewedLocationsList
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1401:41: -> ^( TOK_SKEWED_LOCATIONS skewedLocationsList )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1401:44: ^( TOK_SKEWED_LOCATIONS skewedLocationsList )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SKEWED_LOCATIONS, "TOK_SKEWED_LOCATIONS"), root_1);
				adaptor.addChild(root_1, stream_skewedLocationsList.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "skewedLocations"


	public static class skewedLocationsList_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "skewedLocationsList"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1404:1: skewedLocationsList : skewedLocationMap ( COMMA skewedLocationMap )* -> ^( TOK_SKEWED_LOCATION_LIST ( skewedLocationMap )+ ) ;
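	// Illustrative input, assuming standard Hive DDL syntax (placeholder paths): the
	// comma-separated entries 1='/skew/one', 2='/skew/two' inside the parentheses are
	// collected under a single TOK_SKEWED_LOCATION_LIST node.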
	public final HiveParser.skewedLocationsList_return skewedLocationsList() throws RecognitionException {
		HiveParser.skewedLocationsList_return retval = new HiveParser.skewedLocationsList_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token COMMA353=null;
		ParserRuleReturnScope skewedLocationMap352 =null;
		ParserRuleReturnScope skewedLocationMap354 =null;

		ASTNode COMMA353_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleSubtreeStream stream_skewedLocationMap=new RewriteRuleSubtreeStream(adaptor,"rule skewedLocationMap");

		 pushMsg("skewed locations list", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1407:5: ( skewedLocationMap ( COMMA skewedLocationMap )* -> ^( TOK_SKEWED_LOCATION_LIST ( skewedLocationMap )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1408:7: skewedLocationMap ( COMMA skewedLocationMap )*
			{
			pushFollow(FOLLOW_skewedLocationMap_in_skewedLocationsList6365);
			skewedLocationMap352=skewedLocationMap();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_skewedLocationMap.add(skewedLocationMap352.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1408:25: ( COMMA skewedLocationMap )*
			loop101:
			while (true) {
				int alt101=2;
				int LA101_0 = input.LA(1);
				if ( (LA101_0==COMMA) ) {
					alt101=1;
				}

				switch (alt101) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1408:26: COMMA skewedLocationMap
					{
					COMMA353=(Token)match(input,COMMA,FOLLOW_COMMA_in_skewedLocationsList6368); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA353);

					pushFollow(FOLLOW_skewedLocationMap_in_skewedLocationsList6370);
					skewedLocationMap354=skewedLocationMap();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_skewedLocationMap.add(skewedLocationMap354.getTree());
					}
					break;

				default :
					break loop101;
				}
			}

			// AST REWRITE
			// elements: skewedLocationMap
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1408:52: -> ^( TOK_SKEWED_LOCATION_LIST ( skewedLocationMap )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1408:55: ^( TOK_SKEWED_LOCATION_LIST ( skewedLocationMap )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SKEWED_LOCATION_LIST, "TOK_SKEWED_LOCATION_LIST"), root_1);
				if ( !(stream_skewedLocationMap.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_skewedLocationMap.hasNext() ) {
					adaptor.addChild(root_1, stream_skewedLocationMap.nextTree());
				}
				stream_skewedLocationMap.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "skewedLocationsList"


	public static class skewedLocationMap_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "skewedLocationMap"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1411:1: skewedLocationMap : key= skewedValueLocationElement EQUAL value= StringLiteral -> ^( TOK_SKEWED_LOCATION_MAP $key $value) ;
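	// Illustrative input, assuming standard Hive DDL syntax (placeholder path): a
	// single entry such as 1='/skew/one', where the key is a skewedValueLocationElement
	// and the value must be a StringLiteral; both become children of TOK_SKEWED_LOCATION_MAP.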
	public final HiveParser.skewedLocationMap_return skewedLocationMap() throws RecognitionException {
		HiveParser.skewedLocationMap_return retval = new HiveParser.skewedLocationMap_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token value=null;
		Token EQUAL355=null;
		ParserRuleReturnScope key =null;

		ASTNode value_tree=null;
		ASTNode EQUAL355_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_EQUAL=new RewriteRuleTokenStream(adaptor,"token EQUAL");
		RewriteRuleSubtreeStream stream_skewedValueLocationElement=new RewriteRuleSubtreeStream(adaptor,"rule skewedValueLocationElement");

		 pushMsg("specifying skewed location map", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1414:5: (key= skewedValueLocationElement EQUAL value= StringLiteral -> ^( TOK_SKEWED_LOCATION_MAP $key $value) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1415:7: key= skewedValueLocationElement EQUAL value= StringLiteral
			{
			pushFollow(FOLLOW_skewedValueLocationElement_in_skewedLocationMap6416);
			key=skewedValueLocationElement();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_skewedValueLocationElement.add(key.getTree());
			EQUAL355=(Token)match(input,EQUAL,FOLLOW_EQUAL_in_skewedLocationMap6418); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_EQUAL.add(EQUAL355);

			value=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_skewedLocationMap6422); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(value);

			// AST REWRITE
			// elements: value, key
			// token labels: value
			// rule labels: key, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_value=new RewriteRuleTokenStream(adaptor,"token value",value);
			RewriteRuleSubtreeStream stream_key=new RewriteRuleSubtreeStream(adaptor,"rule key",key!=null?key.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1415:64: -> ^( TOK_SKEWED_LOCATION_MAP $key $value)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1415:67: ^( TOK_SKEWED_LOCATION_MAP $key $value)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SKEWED_LOCATION_MAP, "TOK_SKEWED_LOCATION_MAP"), root_1);
				adaptor.addChild(root_1, stream_key.nextTree());
				adaptor.addChild(root_1, stream_value.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "skewedLocationMap"


	public static class alterStatementSuffixLocation_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixLocation"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1418:1: alterStatementSuffixLocation : KW_SET KW_LOCATION newLoc= StringLiteral -> ^( TOK_ALTERTABLE_LOCATION $newLoc) ;
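	// Illustrative usage, assuming standard Hive DDL syntax (placeholder URI):
	//   ALTER TABLE t SET LOCATION 'hdfs://namenode:8020/warehouse/t';
	// The location literal becomes the single child of TOK_ALTERTABLE_LOCATION.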
	public final HiveParser.alterStatementSuffixLocation_return alterStatementSuffixLocation() throws RecognitionException {
		HiveParser.alterStatementSuffixLocation_return retval = new HiveParser.alterStatementSuffixLocation_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token newLoc=null;
		Token KW_SET356=null;
		Token KW_LOCATION357=null;

		ASTNode newLoc_tree=null;
		ASTNode KW_SET356_tree=null;
		ASTNode KW_LOCATION357_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_LOCATION=new RewriteRuleTokenStream(adaptor,"token KW_LOCATION");
		RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");

		pushMsg("alter location", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1421:3: ( KW_SET KW_LOCATION newLoc= StringLiteral -> ^( TOK_ALTERTABLE_LOCATION $newLoc) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1421:5: KW_SET KW_LOCATION newLoc= StringLiteral
			{
			KW_SET356=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterStatementSuffixLocation6459); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET356);

			KW_LOCATION357=(Token)match(input,KW_LOCATION,FOLLOW_KW_LOCATION_in_alterStatementSuffixLocation6461); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_LOCATION.add(KW_LOCATION357);

			newLoc=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_alterStatementSuffixLocation6465); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(newLoc);

			// AST REWRITE
			// elements: newLoc
			// token labels: newLoc
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_newLoc=new RewriteRuleTokenStream(adaptor,"token newLoc",newLoc);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1422:3: -> ^( TOK_ALTERTABLE_LOCATION $newLoc)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1422:6: ^( TOK_ALTERTABLE_LOCATION $newLoc)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_LOCATION, "TOK_ALTERTABLE_LOCATION"), root_1);
				adaptor.addChild(root_1, stream_newLoc.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixLocation"


	public static class alterStatementSuffixSkewedby_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixSkewedby"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1426:1: alterStatementSuffixSkewedby : ( tableSkewed -> ^( TOK_ALTERTABLE_SKEWED tableSkewed ) | KW_NOT KW_SKEWED -> ^( TOK_ALTERTABLE_SKEWED ) | KW_NOT storedAsDirs -> ^( TOK_ALTERTABLE_SKEWED storedAsDirs ) );
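	// Illustrative usage, assuming standard Hive DDL syntax (placeholder names;
	// examples only, not generated code):
	//   ALTER TABLE t SKEWED BY (c) ON (1,5) STORED AS DIRECTORIES;
	//   ALTER TABLE t NOT SKEWED;
	//   ALTER TABLE t NOT STORED AS DIRECTORIES;
	// All three alternatives root the result at TOK_ALTERTABLE_SKEWED.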
	public final HiveParser.alterStatementSuffixSkewedby_return alterStatementSuffixSkewedby() throws RecognitionException {
		HiveParser.alterStatementSuffixSkewedby_return retval = new HiveParser.alterStatementSuffixSkewedby_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_NOT359=null;
		Token KW_SKEWED360=null;
		Token KW_NOT361=null;
		ParserRuleReturnScope tableSkewed358 =null;
		ParserRuleReturnScope storedAsDirs362 =null;

		ASTNode KW_NOT359_tree=null;
		ASTNode KW_SKEWED360_tree=null;
		ASTNode KW_NOT361_tree=null;
		RewriteRuleTokenStream stream_KW_NOT=new RewriteRuleTokenStream(adaptor,"token KW_NOT");
		RewriteRuleTokenStream stream_KW_SKEWED=new RewriteRuleTokenStream(adaptor,"token KW_SKEWED");
		RewriteRuleSubtreeStream stream_tableSkewed=new RewriteRuleSubtreeStream(adaptor,"rule tableSkewed");
		RewriteRuleSubtreeStream stream_storedAsDirs=new RewriteRuleSubtreeStream(adaptor,"rule storedAsDirs");

		pushMsg("alter skewed by statement", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1429:2: ( tableSkewed -> ^( TOK_ALTERTABLE_SKEWED tableSkewed ) | KW_NOT KW_SKEWED -> ^( TOK_ALTERTABLE_SKEWED ) | KW_NOT storedAsDirs -> ^( TOK_ALTERTABLE_SKEWED storedAsDirs ) )
			int alt102=3;
			int LA102_0 = input.LA(1);
			if ( (LA102_0==KW_SKEWED) ) {
				alt102=1;
			}
			else if ( (LA102_0==KW_NOT) ) {
				int LA102_2 = input.LA(2);
				if ( (LA102_2==KW_SKEWED) ) {
					alt102=2;
				}
				else if ( (LA102_2==KW_STORED) ) {
					alt102=3;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 102, 2, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 102, 0, input);
				throw nvae;
			}

			switch (alt102) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1429:4: tableSkewed
					{
					pushFollow(FOLLOW_tableSkewed_in_alterStatementSuffixSkewedby6499);
					tableSkewed358=tableSkewed();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableSkewed.add(tableSkewed358.getTree());
					// AST REWRITE
					// elements: tableSkewed
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1430:2: -> ^( TOK_ALTERTABLE_SKEWED tableSkewed )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1430:4: ^( TOK_ALTERTABLE_SKEWED tableSkewed )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_SKEWED, "TOK_ALTERTABLE_SKEWED"), root_1);
						adaptor.addChild(root_1, stream_tableSkewed.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1432:3: KW_NOT KW_SKEWED
					{
					KW_NOT359=(Token)match(input,KW_NOT,FOLLOW_KW_NOT_in_alterStatementSuffixSkewedby6514); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_NOT.add(KW_NOT359);

					KW_SKEWED360=(Token)match(input,KW_SKEWED,FOLLOW_KW_SKEWED_in_alterStatementSuffixSkewedby6516); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SKEWED.add(KW_SKEWED360);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1433:2: -> ^( TOK_ALTERTABLE_SKEWED )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1433:4: ^( TOK_ALTERTABLE_SKEWED )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_SKEWED, "TOK_ALTERTABLE_SKEWED"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1435:3: KW_NOT storedAsDirs
					{
					KW_NOT361=(Token)match(input,KW_NOT,FOLLOW_KW_NOT_in_alterStatementSuffixSkewedby6529); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_NOT.add(KW_NOT361);

					pushFollow(FOLLOW_storedAsDirs_in_alterStatementSuffixSkewedby6531);
					storedAsDirs362=storedAsDirs();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_storedAsDirs.add(storedAsDirs362.getTree());
					// AST REWRITE
					// elements: storedAsDirs
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1436:2: -> ^( TOK_ALTERTABLE_SKEWED storedAsDirs )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1436:4: ^( TOK_ALTERTABLE_SKEWED storedAsDirs )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_SKEWED, "TOK_ALTERTABLE_SKEWED"), root_1);
						adaptor.addChild(root_1, stream_storedAsDirs.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixSkewedby"


	public static class alterStatementSuffixExchangePartition_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixExchangePartition"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1439:1: alterStatementSuffixExchangePartition : KW_EXCHANGE partitionSpec KW_WITH KW_TABLE exchangename= tableName -> ^( TOK_ALTERTABLE_EXCHANGEPARTITION partitionSpec $exchangename) ;
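	// Illustrative note (editorial, not ANTLR output): parses the suffix of statements such as
	// "ALTER TABLE t1 EXCHANGE PARTITION (ds='2018-01-01') WITH TABLE t2" (the partition column
	// and value here are only examples).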
	public final HiveParser.alterStatementSuffixExchangePartition_return alterStatementSuffixExchangePartition() throws RecognitionException {
		HiveParser.alterStatementSuffixExchangePartition_return retval = new HiveParser.alterStatementSuffixExchangePartition_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_EXCHANGE363=null;
		Token KW_WITH365=null;
		Token KW_TABLE366=null;
		ParserRuleReturnScope exchangename =null;
		ParserRuleReturnScope partitionSpec364 =null;

		ASTNode KW_EXCHANGE363_tree=null;
		ASTNode KW_WITH365_tree=null;
		ASTNode KW_TABLE366_tree=null;
		RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleTokenStream stream_KW_EXCHANGE=new RewriteRuleTokenStream(adaptor,"token KW_EXCHANGE");
		RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		pushMsg("alter exchange partition", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1442:5: ( KW_EXCHANGE partitionSpec KW_WITH KW_TABLE exchangename= tableName -> ^( TOK_ALTERTABLE_EXCHANGEPARTITION partitionSpec $exchangename) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1442:7: KW_EXCHANGE partitionSpec KW_WITH KW_TABLE exchangename= tableName
			{
			KW_EXCHANGE363=(Token)match(input,KW_EXCHANGE,FOLLOW_KW_EXCHANGE_in_alterStatementSuffixExchangePartition6562); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_EXCHANGE.add(KW_EXCHANGE363);

			pushFollow(FOLLOW_partitionSpec_in_alterStatementSuffixExchangePartition6564);
			partitionSpec364=partitionSpec();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_partitionSpec.add(partitionSpec364.getTree());
			KW_WITH365=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_alterStatementSuffixExchangePartition6566); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_WITH.add(KW_WITH365);

			KW_TABLE366=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_alterStatementSuffixExchangePartition6568); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE366);

			pushFollow(FOLLOW_tableName_in_alterStatementSuffixExchangePartition6572);
			exchangename=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(exchangename.getTree());
			// AST REWRITE
			// elements: exchangename, partitionSpec
			// token labels: 
			// rule labels: exchangename, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_exchangename=new RewriteRuleSubtreeStream(adaptor,"rule exchangename",exchangename!=null?exchangename.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1443:5: -> ^( TOK_ALTERTABLE_EXCHANGEPARTITION partitionSpec $exchangename)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1443:8: ^( TOK_ALTERTABLE_EXCHANGEPARTITION partitionSpec $exchangename)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_EXCHANGEPARTITION, "TOK_ALTERTABLE_EXCHANGEPARTITION"), root_1);
				adaptor.addChild(root_1, stream_partitionSpec.nextTree());
				adaptor.addChild(root_1, stream_exchangename.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixExchangePartition"


	public static class alterStatementSuffixRenamePart_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixRenamePart"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1446:1: alterStatementSuffixRenamePart : KW_RENAME KW_TO partitionSpec -> ^( TOK_ALTERTABLE_RENAMEPART partitionSpec ) ;
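	// Illustrative note (editorial, not ANTLR output): parses the "RENAME TO PARTITION (...)" tail
	// of e.g. "ALTER TABLE t PARTITION (ds='old') RENAME TO PARTITION (ds='new')"; the leading
	// "PARTITION (ds='old')" part is presumably consumed by the enclosing ALTER TABLE rule.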
	public final HiveParser.alterStatementSuffixRenamePart_return alterStatementSuffixRenamePart() throws RecognitionException {
		HiveParser.alterStatementSuffixRenamePart_return retval = new HiveParser.alterStatementSuffixRenamePart_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_RENAME367=null;
		Token KW_TO368=null;
		ParserRuleReturnScope partitionSpec369 =null;

		ASTNode KW_RENAME367_tree=null;
		ASTNode KW_TO368_tree=null;
		RewriteRuleTokenStream stream_KW_RENAME=new RewriteRuleTokenStream(adaptor,"token KW_RENAME");
		RewriteRuleTokenStream stream_KW_TO=new RewriteRuleTokenStream(adaptor,"token KW_TO");
		RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");

		 pushMsg("alter table rename partition statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1449:5: ( KW_RENAME KW_TO partitionSpec -> ^( TOK_ALTERTABLE_RENAMEPART partitionSpec ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1449:7: KW_RENAME KW_TO partitionSpec
			{
			KW_RENAME367=(Token)match(input,KW_RENAME,FOLLOW_KW_RENAME_in_alterStatementSuffixRenamePart6614); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_RENAME.add(KW_RENAME367);

			KW_TO368=(Token)match(input,KW_TO,FOLLOW_KW_TO_in_alterStatementSuffixRenamePart6616); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TO.add(KW_TO368);

			pushFollow(FOLLOW_partitionSpec_in_alterStatementSuffixRenamePart6618);
			partitionSpec369=partitionSpec();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_partitionSpec.add(partitionSpec369.getTree());
			// AST REWRITE
			// elements: partitionSpec
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1450:5: -> ^( TOK_ALTERTABLE_RENAMEPART partitionSpec )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1450:7: ^( TOK_ALTERTABLE_RENAMEPART partitionSpec )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_RENAMEPART, "TOK_ALTERTABLE_RENAMEPART"), root_1);
				adaptor.addChild(root_1, stream_partitionSpec.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixRenamePart"


	public static class alterStatementSuffixStatsPart_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixStatsPart"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1453:1: alterStatementSuffixStatsPart : KW_UPDATE KW_STATISTICS KW_FOR ( KW_COLUMN )? colName= identifier KW_SET tableProperties ( KW_COMMENT comment= StringLiteral )? -> ^( TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties ( $comment)? ) ;
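	// Illustrative note (editorial, not ANTLR output): matches column-statistics updates such as
	// "UPDATE STATISTICS FOR COLUMN c SET ('numDVs'='100') COMMENT 'manual fix'" (the property
	// name, value, and comment here are just examples).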
	public final HiveParser.alterStatementSuffixStatsPart_return alterStatementSuffixStatsPart() throws RecognitionException {
		HiveParser.alterStatementSuffixStatsPart_return retval = new HiveParser.alterStatementSuffixStatsPart_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token comment=null;
		Token KW_UPDATE370=null;
		Token KW_STATISTICS371=null;
		Token KW_FOR372=null;
		Token KW_COLUMN373=null;
		Token KW_SET374=null;
		Token KW_COMMENT376=null;
		ParserRuleReturnScope colName =null;
		ParserRuleReturnScope tableProperties375 =null;

		ASTNode comment_tree=null;
		ASTNode KW_UPDATE370_tree=null;
		ASTNode KW_STATISTICS371_tree=null;
		ASTNode KW_FOR372_tree=null;
		ASTNode KW_COLUMN373_tree=null;
		ASTNode KW_SET374_tree=null;
		ASTNode KW_COMMENT376_tree=null;
		RewriteRuleTokenStream stream_KW_STATISTICS=new RewriteRuleTokenStream(adaptor,"token KW_STATISTICS");
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_FOR=new RewriteRuleTokenStream(adaptor,"token KW_FOR");
		RewriteRuleTokenStream stream_KW_UPDATE=new RewriteRuleTokenStream(adaptor,"token KW_UPDATE");
		RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");
		RewriteRuleTokenStream stream_KW_COLUMN=new RewriteRuleTokenStream(adaptor,"token KW_COLUMN");
		RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_tableProperties=new RewriteRuleSubtreeStream(adaptor,"rule tableProperties");

		 pushMsg("alter table stats partition statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1456:5: ( KW_UPDATE KW_STATISTICS KW_FOR ( KW_COLUMN )? colName= identifier KW_SET tableProperties ( KW_COMMENT comment= StringLiteral )? -> ^( TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties ( $comment)? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1456:7: KW_UPDATE KW_STATISTICS KW_FOR ( KW_COLUMN )? colName= identifier KW_SET tableProperties ( KW_COMMENT comment= StringLiteral )?
			{
			KW_UPDATE370=(Token)match(input,KW_UPDATE,FOLLOW_KW_UPDATE_in_alterStatementSuffixStatsPart6656); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_UPDATE.add(KW_UPDATE370);

			KW_STATISTICS371=(Token)match(input,KW_STATISTICS,FOLLOW_KW_STATISTICS_in_alterStatementSuffixStatsPart6658); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_STATISTICS.add(KW_STATISTICS371);

			KW_FOR372=(Token)match(input,KW_FOR,FOLLOW_KW_FOR_in_alterStatementSuffixStatsPart6660); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FOR.add(KW_FOR372);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1456:38: ( KW_COLUMN )?
			int alt103=2;
			int LA103_0 = input.LA(1);
			if ( (LA103_0==KW_COLUMN) ) {
				alt103=1;
			}
			switch (alt103) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1456:38: KW_COLUMN
					{
					KW_COLUMN373=(Token)match(input,KW_COLUMN,FOLLOW_KW_COLUMN_in_alterStatementSuffixStatsPart6662); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_COLUMN.add(KW_COLUMN373);

					}
					break;

			}

			pushFollow(FOLLOW_identifier_in_alterStatementSuffixStatsPart6667);
			colName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(colName.getTree());
			KW_SET374=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterStatementSuffixStatsPart6669); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET374);

			pushFollow(FOLLOW_tableProperties_in_alterStatementSuffixStatsPart6671);
			tableProperties375=tableProperties();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableProperties.add(tableProperties375.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1456:91: ( KW_COMMENT comment= StringLiteral )?
			int alt104=2;
			int LA104_0 = input.LA(1);
			if ( (LA104_0==KW_COMMENT) ) {
				alt104=1;
			}
			switch (alt104) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1456:92: KW_COMMENT comment= StringLiteral
					{
					KW_COMMENT376=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_alterStatementSuffixStatsPart6674); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_COMMENT.add(KW_COMMENT376);

					comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_alterStatementSuffixStatsPart6678); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(comment);

					}
					break;

			}

			// AST REWRITE
			// elements: tableProperties, colName, comment
			// token labels: comment
			// rule labels: colName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
			RewriteRuleSubtreeStream stream_colName=new RewriteRuleSubtreeStream(adaptor,"rule colName",colName!=null?colName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1457:5: -> ^( TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties ( $comment)? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1457:7: ^( TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties ( $comment)? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_UPDATECOLSTATS, "TOK_ALTERTABLE_UPDATECOLSTATS"), root_1);
				adaptor.addChild(root_1, stream_colName.nextTree());
				adaptor.addChild(root_1, stream_tableProperties.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1457:65: ( $comment)?
				if ( stream_comment.hasNext() ) {
					adaptor.addChild(root_1, stream_comment.nextNode());
				}
				stream_comment.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixStatsPart"


	public static class alterStatementSuffixMergeFiles_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixMergeFiles"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1460:1: alterStatementSuffixMergeFiles : KW_CONCATENATE -> ^( TOK_ALTERTABLE_MERGEFILES ) ;
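	// Illustrative note (editorial, not ANTLR output): matches the bare CONCATENATE suffix, as in
	// "ALTER TABLE t [PARTITION (...)] CONCATENATE", which requests a file merge on the table or
	// partition.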
	public final HiveParser.alterStatementSuffixMergeFiles_return alterStatementSuffixMergeFiles() throws RecognitionException {
		HiveParser.alterStatementSuffixMergeFiles_return retval = new HiveParser.alterStatementSuffixMergeFiles_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_CONCATENATE377=null;

		ASTNode KW_CONCATENATE377_tree=null;
		RewriteRuleTokenStream stream_KW_CONCATENATE=new RewriteRuleTokenStream(adaptor,"token KW_CONCATENATE");

		 pushMsg("", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1463:5: ( KW_CONCATENATE -> ^( TOK_ALTERTABLE_MERGEFILES ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1463:7: KW_CONCATENATE
			{
			KW_CONCATENATE377=(Token)match(input,KW_CONCATENATE,FOLLOW_KW_CONCATENATE_in_alterStatementSuffixMergeFiles6725); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_CONCATENATE.add(KW_CONCATENATE377);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1464:5: -> ^( TOK_ALTERTABLE_MERGEFILES )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1464:8: ^( TOK_ALTERTABLE_MERGEFILES )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_MERGEFILES, "TOK_ALTERTABLE_MERGEFILES"), root_1);
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixMergeFiles"


	public static class alterStatementSuffixBucketNum_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixBucketNum"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1467:1: alterStatementSuffixBucketNum : KW_INTO num= Number KW_BUCKETS -> ^( TOK_ALTERTABLE_BUCKETS $num) ;
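	// Illustrative note (editorial, not ANTLR output): matches the "INTO 32 BUCKETS" tail of an
	// ALTER TABLE statement (32 being only an example bucket count).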
	public final HiveParser.alterStatementSuffixBucketNum_return alterStatementSuffixBucketNum() throws RecognitionException {
		HiveParser.alterStatementSuffixBucketNum_return retval = new HiveParser.alterStatementSuffixBucketNum_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token num=null;
		Token KW_INTO378=null;
		Token KW_BUCKETS379=null;

		ASTNode num_tree=null;
		ASTNode KW_INTO378_tree=null;
		ASTNode KW_BUCKETS379_tree=null;
		RewriteRuleTokenStream stream_Number=new RewriteRuleTokenStream(adaptor,"token Number");
		RewriteRuleTokenStream stream_KW_INTO=new RewriteRuleTokenStream(adaptor,"token KW_INTO");
		RewriteRuleTokenStream stream_KW_BUCKETS=new RewriteRuleTokenStream(adaptor,"token KW_BUCKETS");

		 pushMsg("", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1470:5: ( KW_INTO num= Number KW_BUCKETS -> ^( TOK_ALTERTABLE_BUCKETS $num) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1470:7: KW_INTO num= Number KW_BUCKETS
			{
			KW_INTO378=(Token)match(input,KW_INTO,FOLLOW_KW_INTO_in_alterStatementSuffixBucketNum6762); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_INTO.add(KW_INTO378);

			num=(Token)match(input,Number,FOLLOW_Number_in_alterStatementSuffixBucketNum6766); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_Number.add(num);

			KW_BUCKETS379=(Token)match(input,KW_BUCKETS,FOLLOW_KW_BUCKETS_in_alterStatementSuffixBucketNum6768); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_BUCKETS.add(KW_BUCKETS379);

			// AST REWRITE
			// elements: num
			// token labels: num
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_num=new RewriteRuleTokenStream(adaptor,"token num",num);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1471:5: -> ^( TOK_ALTERTABLE_BUCKETS $num)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1471:8: ^( TOK_ALTERTABLE_BUCKETS $num)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_BUCKETS, "TOK_ALTERTABLE_BUCKETS"), root_1);
				adaptor.addChild(root_1, stream_num.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixBucketNum"


	public static class blocking_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "blocking"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1474:1: blocking : KW_AND KW_WAIT -> TOK_BLOCKING ;
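	// Illustrative note (editorial, not ANTLR output): matches the "AND WAIT" modifier that makes a
	// compaction request block until the compaction finishes; it rewrites to a bare TOK_BLOCKING node.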
	public final HiveParser.blocking_return blocking() throws RecognitionException {
		HiveParser.blocking_return retval = new HiveParser.blocking_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_AND380=null;
		Token KW_WAIT381=null;

		ASTNode KW_AND380_tree=null;
		ASTNode KW_WAIT381_tree=null;
		RewriteRuleTokenStream stream_KW_WAIT=new RewriteRuleTokenStream(adaptor,"token KW_WAIT");
		RewriteRuleTokenStream stream_KW_AND=new RewriteRuleTokenStream(adaptor,"token KW_AND");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1475:3: ( KW_AND KW_WAIT -> TOK_BLOCKING )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1475:5: KW_AND KW_WAIT
			{
			KW_AND380=(Token)match(input,KW_AND,FOLLOW_KW_AND_in_blocking6796); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_AND.add(KW_AND380);

			KW_WAIT381=(Token)match(input,KW_WAIT,FOLLOW_KW_WAIT_in_blocking6798); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_WAIT.add(KW_WAIT381);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1476:3: -> TOK_BLOCKING
			{
				adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_BLOCKING, "TOK_BLOCKING"));
			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "blocking"


	public static class alterStatementSuffixCompact_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixCompact"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1479:1: alterStatementSuffixCompact : KW_COMPACT compactType= StringLiteral ( blocking )? ( KW_WITH KW_OVERWRITE KW_TBLPROPERTIES tableProperties )? -> ^( TOK_ALTERTABLE_COMPACT $compactType ( blocking )? ( tableProperties )? ) ;
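	// Illustrative note (editorial, not ANTLR output): matches compaction requests such as
	// "ALTER TABLE t COMPACT 'major' AND WAIT WITH OVERWRITE TBLPROPERTIES (...)"; both the
	// blocking clause and the TBLPROPERTIES clause are optional, as the rewrite shows.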
	public final HiveParser.alterStatementSuffixCompact_return alterStatementSuffixCompact() throws RecognitionException {
		HiveParser.alterStatementSuffixCompact_return retval = new HiveParser.alterStatementSuffixCompact_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token compactType=null;
		Token KW_COMPACT382=null;
		Token KW_WITH384=null;
		Token KW_OVERWRITE385=null;
		Token KW_TBLPROPERTIES386=null;
		ParserRuleReturnScope blocking383 =null;
		ParserRuleReturnScope tableProperties387 =null;

		ASTNode compactType_tree=null;
		ASTNode KW_COMPACT382_tree=null;
		ASTNode KW_WITH384_tree=null;
		ASTNode KW_OVERWRITE385_tree=null;
		ASTNode KW_TBLPROPERTIES386_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
		RewriteRuleTokenStream stream_KW_COMPACT=new RewriteRuleTokenStream(adaptor,"token KW_COMPACT");
		RewriteRuleTokenStream stream_KW_OVERWRITE=new RewriteRuleTokenStream(adaptor,"token KW_OVERWRITE");
		RewriteRuleTokenStream stream_KW_TBLPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_TBLPROPERTIES");
		RewriteRuleSubtreeStream stream_blocking=new RewriteRuleSubtreeStream(adaptor,"rule blocking");
		RewriteRuleSubtreeStream stream_tableProperties=new RewriteRuleSubtreeStream(adaptor,"rule tableProperties");

		 msgs.push("compaction request"); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1482:5: ( KW_COMPACT compactType= StringLiteral ( blocking )? ( KW_WITH KW_OVERWRITE KW_TBLPROPERTIES tableProperties )? -> ^( TOK_ALTERTABLE_COMPACT $compactType ( blocking )? ( tableProperties )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1482:7: KW_COMPACT compactType= StringLiteral ( blocking )? ( KW_WITH KW_OVERWRITE KW_TBLPROPERTIES tableProperties )?
			{
			KW_COMPACT382=(Token)match(input,KW_COMPACT,FOLLOW_KW_COMPACT_in_alterStatementSuffixCompact6829); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_COMPACT.add(KW_COMPACT382);

			compactType=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_alterStatementSuffixCompact6833); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(compactType);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1482:44: ( blocking )?
			int alt105=2;
			int LA105_0 = input.LA(1);
			if ( (LA105_0==KW_AND) ) {
				alt105=1;
			}
			switch (alt105) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1482:44: blocking
					{
					pushFollow(FOLLOW_blocking_in_alterStatementSuffixCompact6835);
					blocking383=blocking();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_blocking.add(blocking383.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1482:54: ( KW_WITH KW_OVERWRITE KW_TBLPROPERTIES tableProperties )?
			int alt106=2;
			int LA106_0 = input.LA(1);
			if ( (LA106_0==KW_WITH) ) {
				alt106=1;
			}
			switch (alt106) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1482:55: KW_WITH KW_OVERWRITE KW_TBLPROPERTIES tableProperties
					{
					KW_WITH384=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_alterStatementSuffixCompact6839); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_WITH.add(KW_WITH384);

					KW_OVERWRITE385=(Token)match(input,KW_OVERWRITE,FOLLOW_KW_OVERWRITE_in_alterStatementSuffixCompact6841); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_OVERWRITE.add(KW_OVERWRITE385);

					KW_TBLPROPERTIES386=(Token)match(input,KW_TBLPROPERTIES,FOLLOW_KW_TBLPROPERTIES_in_alterStatementSuffixCompact6843); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TBLPROPERTIES.add(KW_TBLPROPERTIES386);

					pushFollow(FOLLOW_tableProperties_in_alterStatementSuffixCompact6845);
					tableProperties387=tableProperties();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableProperties.add(tableProperties387.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: compactType, tableProperties, blocking
			// token labels: compactType
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_compactType=new RewriteRuleTokenStream(adaptor,"token compactType",compactType);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1483:5: -> ^( TOK_ALTERTABLE_COMPACT $compactType ( blocking )? ( tableProperties )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1483:8: ^( TOK_ALTERTABLE_COMPACT $compactType ( blocking )? ( tableProperties )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_COMPACT, "TOK_ALTERTABLE_COMPACT"), root_1);
				adaptor.addChild(root_1, stream_compactType.nextNode());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1483:46: ( blocking )?
				if ( stream_blocking.hasNext() ) {
					adaptor.addChild(root_1, stream_blocking.nextTree());
				}
				stream_blocking.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1483:56: ( tableProperties )?
				if ( stream_tableProperties.hasNext() ) {
					adaptor.addChild(root_1, stream_tableProperties.nextTree());
				}
				stream_tableProperties.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { msgs.pop(); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixCompact"


	public static class alterStatementSuffixSetOwner_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterStatementSuffixSetOwner"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1486:1: alterStatementSuffixSetOwner : KW_SET KW_OWNER principalName -> ^( TOK_ALTERTABLE_OWNER principalName ) ;
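	// Illustrative note (editorial, not ANTLR output): matches suffixes such as
	// "SET OWNER USER some_user" or "SET OWNER ROLE some_role"; the principalName rule supplies
	// the principal kind (USER/ROLE/...).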
	public final HiveParser.alterStatementSuffixSetOwner_return alterStatementSuffixSetOwner() throws RecognitionException {
		HiveParser.alterStatementSuffixSetOwner_return retval = new HiveParser.alterStatementSuffixSetOwner_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SET388=null;
		Token KW_OWNER389=null;
		ParserRuleReturnScope principalName390 =null;

		ASTNode KW_SET388_tree=null;
		ASTNode KW_OWNER389_tree=null;
		RewriteRuleTokenStream stream_KW_OWNER=new RewriteRuleTokenStream(adaptor,"token KW_OWNER");
		RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
		RewriteRuleSubtreeStream stream_principalName=new RewriteRuleSubtreeStream(adaptor,"rule principalName");

		 pushMsg("alter table set owner", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1489:5: ( KW_SET KW_OWNER principalName -> ^( TOK_ALTERTABLE_OWNER principalName ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1489:7: KW_SET KW_OWNER principalName
			{
			KW_SET388=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterStatementSuffixSetOwner6893); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET388);

			KW_OWNER389=(Token)match(input,KW_OWNER,FOLLOW_KW_OWNER_in_alterStatementSuffixSetOwner6895); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_OWNER.add(KW_OWNER389);

			pushFollow(FOLLOW_principalName_in_alterStatementSuffixSetOwner6897);
			principalName390=principalName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_principalName.add(principalName390.getTree());
			// AST REWRITE
			// elements: principalName
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1490:5: -> ^( TOK_ALTERTABLE_OWNER principalName )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1490:8: ^( TOK_ALTERTABLE_OWNER principalName )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_OWNER, "TOK_ALTERTABLE_OWNER"), root_1);
				adaptor.addChild(root_1, stream_principalName.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterStatementSuffixSetOwner"


	public static class fileFormat_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "fileFormat"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1493:1: fileFormat : ( KW_INPUTFORMAT inFmt= StringLiteral KW_OUTPUTFORMAT outFmt= StringLiteral KW_SERDE serdeCls= StringLiteral ( KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral )? -> ^( TOK_TABLEFILEFORMAT $inFmt $outFmt $serdeCls ( $inDriver)? ( $outDriver)? ) |genericSpec= identifier -> ^( TOK_FILEFORMAT_GENERIC $genericSpec) );
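	// Illustrative note (editorial, not ANTLR output): the first alternative matches a fully explicit
	// format, e.g. "INPUTFORMAT 'in.Class' OUTPUTFORMAT 'out.Class' SERDE 'serde.Class'", while the
	// second matches a generic format name such as ORC (the class names here are placeholders).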
	public final HiveParser.fileFormat_return fileFormat() throws RecognitionException {
		HiveParser.fileFormat_return retval = new HiveParser.fileFormat_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token inFmt=null;
		Token outFmt=null;
		Token serdeCls=null;
		Token inDriver=null;
		Token outDriver=null;
		Token KW_INPUTFORMAT391=null;
		Token KW_OUTPUTFORMAT392=null;
		Token KW_SERDE393=null;
		Token KW_INPUTDRIVER394=null;
		Token KW_OUTPUTDRIVER395=null;
		ParserRuleReturnScope genericSpec =null;

		ASTNode inFmt_tree=null;
		ASTNode outFmt_tree=null;
		ASTNode serdeCls_tree=null;
		ASTNode inDriver_tree=null;
		ASTNode outDriver_tree=null;
		ASTNode KW_INPUTFORMAT391_tree=null;
		ASTNode KW_OUTPUTFORMAT392_tree=null;
		ASTNode KW_SERDE393_tree=null;
		ASTNode KW_INPUTDRIVER394_tree=null;
		ASTNode KW_OUTPUTDRIVER395_tree=null;
		RewriteRuleTokenStream stream_KW_INPUTFORMAT=new RewriteRuleTokenStream(adaptor,"token KW_INPUTFORMAT");
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_INPUTDRIVER=new RewriteRuleTokenStream(adaptor,"token KW_INPUTDRIVER");
		RewriteRuleTokenStream stream_KW_SERDE=new RewriteRuleTokenStream(adaptor,"token KW_SERDE");
		RewriteRuleTokenStream stream_KW_OUTPUTFORMAT=new RewriteRuleTokenStream(adaptor,"token KW_OUTPUTFORMAT");
		RewriteRuleTokenStream stream_KW_OUTPUTDRIVER=new RewriteRuleTokenStream(adaptor,"token KW_OUTPUTDRIVER");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");

		 pushMsg("file format specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1496:5: ( KW_INPUTFORMAT inFmt= StringLiteral KW_OUTPUTFORMAT outFmt= StringLiteral KW_SERDE serdeCls= StringLiteral ( KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral )? -> ^( TOK_TABLEFILEFORMAT $inFmt $outFmt $serdeCls ( $inDriver)? ( $outDriver)? ) |genericSpec= identifier -> ^( TOK_FILEFORMAT_GENERIC $genericSpec) )
			int alt108=2;
			int LA108_0 = input.LA(1);
			if ( (LA108_0==KW_INPUTFORMAT) ) {
				int LA108_1 = input.LA(2);
				if ( (LA108_1==StringLiteral) ) {
					alt108=1;
				}
				else if ( (LA108_1==EOF) ) {
					alt108=2;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 108, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

			}
			else if ( (LA108_0==Identifier||(LA108_0 >= KW_ABORT && LA108_0 <= KW_AFTER)||LA108_0==KW_ALLOC_FRACTION||LA108_0==KW_ANALYZE||LA108_0==KW_ARCHIVE||LA108_0==KW_ASC||(LA108_0 >= KW_AUTOCOMMIT && LA108_0 <= KW_BEFORE)||(LA108_0 >= KW_BUCKET && LA108_0 <= KW_BUCKETS)||(LA108_0 >= KW_CACHE && LA108_0 <= KW_CASCADE)||LA108_0==KW_CHANGE||(LA108_0 >= KW_CHECK && LA108_0 <= KW_COLLECTION)||(LA108_0 >= KW_COLUMNS && LA108_0 <= KW_COMMENT)||(LA108_0 >= KW_COMPACT && LA108_0 <= KW_CONCATENATE)||LA108_0==KW_CONTINUE||LA108_0==KW_DATA||LA108_0==KW_DATABASES||(LA108_0 >= KW_DATETIME && LA108_0 <= KW_DBPROPERTIES)||(LA108_0 >= KW_DEFAULT && LA108_0 <= KW_DEFINED)||(LA108_0 >= KW_DELIMITED && LA108_0 <= KW_DESC)||(LA108_0 >= KW_DETAIL && LA108_0 <= KW_DISABLE)||(LA108_0 >= KW_DISTRIBUTE && LA108_0 <= KW_DO)||LA108_0==KW_DOW||(LA108_0 >= KW_DUMP && LA108_0 <= KW_ELEM_TYPE)||LA108_0==KW_ENABLE||(LA108_0 >= KW_ENFORCED && LA108_0 <= KW_ESCAPED)||LA108_0==KW_EXCLUSIVE||(LA108_0 >= KW_EXPLAIN && LA108_0 <= KW_EXPRESSION)||(LA108_0 >= KW_FIELDS && LA108_0 <= KW_FIRST)||(LA108_0 >= KW_FORMAT && LA108_0 <= KW_FORMATTED)||LA108_0==KW_FUNCTIONS||(LA108_0 >= KW_HOUR && LA108_0 <= KW_IDXPROPERTIES)||(LA108_0 >= KW_INDEX && LA108_0 <= KW_INDEXES)||(LA108_0 >= KW_INPATH && LA108_0 <= KW_INPUTDRIVER)||(LA108_0 >= KW_ISOLATION && LA108_0 <= KW_JAR)||(LA108_0 >= KW_KEY && LA108_0 <= KW_LAST)||LA108_0==KW_LEVEL||(LA108_0 >= KW_LIMIT && LA108_0 <= KW_LOAD)||(LA108_0 >= KW_LOCATION && LA108_0 <= KW_LONG)||LA108_0==KW_MANAGEMENT||(LA108_0 >= KW_MAPJOIN && LA108_0 <= KW_MATERIALIZED)||LA108_0==KW_METADATA||(LA108_0 >= KW_MINUTE && LA108_0 <= KW_MONTH)||(LA108_0 >= KW_MOVE && LA108_0 <= KW_MSCK)||(LA108_0 >= KW_NORELY && LA108_0 <= KW_NOSCAN)||LA108_0==KW_NOVALIDATE||LA108_0==KW_NULLS||LA108_0==KW_OFFSET||(LA108_0 >= KW_OPERATOR && LA108_0 <= KW_OPTION)||(LA108_0 >= KW_OUTPUTDRIVER && LA108_0 <= KW_OUTPUTFORMAT)||(LA108_0 >= KW_OVERWRITE && LA108_0 <= KW_OWNER)||(LA108_0 >= KW_PARTITIONED && LA108_0 <= KW_PATH)||(LA108_0 >= KW_PLAN && LA108_0 <= KW_POOL)||LA108_0==KW_PRINCIPALS||(LA108_0 >= KW_PURGE && LA108_0 <= KW_QUERY_PARALLELISM)||LA108_0==KW_READ||(LA108_0 >= KW_REBUILD && LA108_0 <= KW_RECORDWRITER)||(LA108_0 >= KW_RELOAD && LA108_0 <= KW_RESTRICT)||LA108_0==KW_REWRITE||(LA108_0 >= KW_ROLE && LA108_0 <= KW_ROLES)||(LA108_0 >= KW_SCHEDULING_POLICY && LA108_0 <= KW_SECOND)||(LA108_0 >= KW_SEMI && LA108_0 <= KW_SERVER)||(LA108_0 >= KW_SETS && LA108_0 <= KW_SKEWED)||(LA108_0 >= KW_SNAPSHOT && LA108_0 <= KW_SSL)||(LA108_0 >= KW_STATISTICS && LA108_0 <= KW_SUMMARY)||LA108_0==KW_TABLES||(LA108_0 >= KW_TBLPROPERTIES && LA108_0 <= KW_TERMINATED)||LA108_0==KW_TINYINT||(LA108_0 >= KW_TOUCH && LA108_0 <= KW_TRANSACTIONS)||LA108_0==KW_UNARCHIVE||LA108_0==KW_UNDO||LA108_0==KW_UNIONTYPE||(LA108_0 >= KW_UNLOCK && LA108_0 <= KW_UNSIGNED)||(LA108_0 >= KW_URI && LA108_0 <= KW_USE)||(LA108_0 >= KW_UTC && LA108_0 <= KW_VALIDATE)||LA108_0==KW_VALUE_TYPE||(LA108_0 >= KW_VECTORIZATION && LA108_0 <= KW_WEEK)||LA108_0==KW_WHILE||(LA108_0 >= KW_WORK && LA108_0 <= KW_ZONE)||LA108_0==KW_BATCH||LA108_0==KW_DAYOFWEEK||LA108_0==KW_HOLD_DDLTIME||LA108_0==KW_IGNORE||LA108_0==KW_NO_DROP||LA108_0==KW_OFFLINE||LA108_0==KW_PROTECTION||LA108_0==KW_READONLY||LA108_0==KW_TIMESTAMPTZ) ) {
				alt108=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 108, 0, input);
				throw nvae;
			}

			switch (alt108) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1496:7: KW_INPUTFORMAT inFmt= StringLiteral KW_OUTPUTFORMAT outFmt= StringLiteral KW_SERDE serdeCls= StringLiteral ( KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral )?
					{
					KW_INPUTFORMAT391=(Token)match(input,KW_INPUTFORMAT,FOLLOW_KW_INPUTFORMAT_in_fileFormat6936); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_INPUTFORMAT.add(KW_INPUTFORMAT391);

					inFmt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_fileFormat6940); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(inFmt);

					KW_OUTPUTFORMAT392=(Token)match(input,KW_OUTPUTFORMAT,FOLLOW_KW_OUTPUTFORMAT_in_fileFormat6942); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_OUTPUTFORMAT.add(KW_OUTPUTFORMAT392);

					outFmt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_fileFormat6946); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(outFmt);

					KW_SERDE393=(Token)match(input,KW_SERDE,FOLLOW_KW_SERDE_in_fileFormat6948); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SERDE.add(KW_SERDE393);

					serdeCls=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_fileFormat6952); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(serdeCls);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1496:111: ( KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral )?
					int alt107=2;
					int LA107_0 = input.LA(1);
					if ( (LA107_0==KW_INPUTDRIVER) ) {
						alt107=1;
					}
					switch (alt107) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1496:112: KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral
							{
							KW_INPUTDRIVER394=(Token)match(input,KW_INPUTDRIVER,FOLLOW_KW_INPUTDRIVER_in_fileFormat6955); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_INPUTDRIVER.add(KW_INPUTDRIVER394);

							inDriver=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_fileFormat6959); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_StringLiteral.add(inDriver);

							KW_OUTPUTDRIVER395=(Token)match(input,KW_OUTPUTDRIVER,FOLLOW_KW_OUTPUTDRIVER_in_fileFormat6961); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_OUTPUTDRIVER.add(KW_OUTPUTDRIVER395);

							outDriver=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_fileFormat6965); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_StringLiteral.add(outDriver);

							}
							break;

					}

					// AST REWRITE
					// elements: serdeCls, outDriver, outFmt, inDriver, inFmt
					// token labels: inFmt, inDriver, serdeCls, outDriver, outFmt
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleTokenStream stream_inFmt=new RewriteRuleTokenStream(adaptor,"token inFmt",inFmt);
					RewriteRuleTokenStream stream_inDriver=new RewriteRuleTokenStream(adaptor,"token inDriver",inDriver);
					RewriteRuleTokenStream stream_serdeCls=new RewriteRuleTokenStream(adaptor,"token serdeCls",serdeCls);
					RewriteRuleTokenStream stream_outDriver=new RewriteRuleTokenStream(adaptor,"token outDriver",outDriver);
					RewriteRuleTokenStream stream_outFmt=new RewriteRuleTokenStream(adaptor,"token outFmt",outFmt);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1497:7: -> ^( TOK_TABLEFILEFORMAT $inFmt $outFmt $serdeCls ( $inDriver)? ( $outDriver)? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1497:10: ^( TOK_TABLEFILEFORMAT $inFmt $outFmt $serdeCls ( $inDriver)? ( $outDriver)? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLEFILEFORMAT, "TOK_TABLEFILEFORMAT"), root_1);
						adaptor.addChild(root_1, stream_inFmt.nextNode());
						adaptor.addChild(root_1, stream_outFmt.nextNode());
						adaptor.addChild(root_1, stream_serdeCls.nextNode());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1497:58: ( $inDriver)?
						if ( stream_inDriver.hasNext() ) {
							adaptor.addChild(root_1, stream_inDriver.nextNode());
						}
						stream_inDriver.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1497:69: ( $outDriver)?
						if ( stream_outDriver.hasNext() ) {
							adaptor.addChild(root_1, stream_outDriver.nextNode());
						}
						stream_outDriver.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1498:7: genericSpec= identifier
					{
					pushFollow(FOLLOW_identifier_in_fileFormat7006);
					genericSpec=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(genericSpec.getTree());
					// AST REWRITE
					// elements: genericSpec
					// token labels: 
					// rule labels: genericSpec, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_genericSpec=new RewriteRuleSubtreeStream(adaptor,"rule genericSpec",genericSpec!=null?genericSpec.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1498:30: -> ^( TOK_FILEFORMAT_GENERIC $genericSpec)
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1498:33: ^( TOK_FILEFORMAT_GENERIC $genericSpec)
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FILEFORMAT_GENERIC, "TOK_FILEFORMAT_GENERIC"), root_1);
						adaptor.addChild(root_1, stream_genericSpec.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "fileFormat"


	public static class inputFileFormat_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "inputFileFormat"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1501:1: inputFileFormat : KW_INPUTFORMAT inFmt= StringLiteral KW_SERDE serdeCls= StringLiteral -> ^( TOK_INPUTFORMAT $inFmt $serdeCls) ;
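	// Illustrative note (editorial, not ANTLR output): a reduced variant of fileFormat used for
	// LOAD DATA (per the pushMsg text below), e.g. "INPUTFORMAT 'in.Class' SERDE 'serde.Class'"
	// (class names are placeholders).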
	public final HiveParser.inputFileFormat_return inputFileFormat() throws RecognitionException {
		HiveParser.inputFileFormat_return retval = new HiveParser.inputFileFormat_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token inFmt=null;
		Token serdeCls=null;
		Token KW_INPUTFORMAT396=null;
		Token KW_SERDE397=null;

		ASTNode inFmt_tree=null;
		ASTNode serdeCls_tree=null;
		ASTNode KW_INPUTFORMAT396_tree=null;
		ASTNode KW_SERDE397_tree=null;
		RewriteRuleTokenStream stream_KW_INPUTFORMAT=new RewriteRuleTokenStream(adaptor,"token KW_INPUTFORMAT");
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_SERDE=new RewriteRuleTokenStream(adaptor,"token KW_SERDE");

		 pushMsg("Load Data input file format specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1504:5: ( KW_INPUTFORMAT inFmt= StringLiteral KW_SERDE serdeCls= StringLiteral -> ^( TOK_INPUTFORMAT $inFmt $serdeCls) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1504:7: KW_INPUTFORMAT inFmt= StringLiteral KW_SERDE serdeCls= StringLiteral
			{
			KW_INPUTFORMAT396=(Token)match(input,KW_INPUTFORMAT,FOLLOW_KW_INPUTFORMAT_in_inputFileFormat7042); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_INPUTFORMAT.add(KW_INPUTFORMAT396);

			inFmt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_inputFileFormat7046); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(inFmt);

			KW_SERDE397=(Token)match(input,KW_SERDE,FOLLOW_KW_SERDE_in_inputFileFormat7048); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SERDE.add(KW_SERDE397);

			serdeCls=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_inputFileFormat7052); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(serdeCls);

			// AST REWRITE
			// elements: serdeCls, inFmt
			// token labels: inFmt, serdeCls
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_inFmt=new RewriteRuleTokenStream(adaptor,"token inFmt",inFmt);
			RewriteRuleTokenStream stream_serdeCls=new RewriteRuleTokenStream(adaptor,"token serdeCls",serdeCls);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1505:7: -> ^( TOK_INPUTFORMAT $inFmt $serdeCls)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1505:10: ^( TOK_INPUTFORMAT $inFmt $serdeCls)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_INPUTFORMAT, "TOK_INPUTFORMAT"), root_1);
				adaptor.addChild(root_1, stream_inFmt.nextNode());
				adaptor.addChild(root_1, stream_serdeCls.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "inputFileFormat"


	public static class tabTypeExpr_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tabTypeExpr"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1508:1: tabTypeExpr : identifier ( DOT ^ identifier )? ( identifier ( DOT ^ ( ( KW_ELEM_TYPE )=> KW_ELEM_TYPE | ( KW_KEY_TYPE )=> KW_KEY_TYPE | ( KW_VALUE_TYPE )=> KW_VALUE_TYPE | identifier ) )* )? ;
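	// Illustrative note (editorial, not ANTLR output): matches the dotted table/column path used by
	// DESCRIBE, e.g. "db.tab" or "db.tab.col.field"; the KW_ELEM_TYPE/KW_KEY_TYPE/KW_VALUE_TYPE
	// alternatives presumably correspond to the $elem$/$key$/$value$ selectors for complex types.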
	public final HiveParser.tabTypeExpr_return tabTypeExpr() throws RecognitionException {
		HiveParser.tabTypeExpr_return retval = new HiveParser.tabTypeExpr_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token DOT399=null;
		Token DOT402=null;
		Token KW_ELEM_TYPE403=null;
		Token KW_KEY_TYPE404=null;
		Token KW_VALUE_TYPE405=null;
		ParserRuleReturnScope identifier398 =null;
		ParserRuleReturnScope identifier400 =null;
		ParserRuleReturnScope identifier401 =null;
		ParserRuleReturnScope identifier406 =null;

		ASTNode DOT399_tree=null;
		ASTNode DOT402_tree=null;
		ASTNode KW_ELEM_TYPE403_tree=null;
		ASTNode KW_KEY_TYPE404_tree=null;
		ASTNode KW_VALUE_TYPE405_tree=null;

		 pushMsg("specifying table types", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1511:4: ( identifier ( DOT ^ identifier )? ( identifier ( DOT ^ ( ( KW_ELEM_TYPE )=> KW_ELEM_TYPE | ( KW_KEY_TYPE )=> KW_KEY_TYPE | ( KW_VALUE_TYPE )=> KW_VALUE_TYPE | identifier ) )* )? )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1511:6: identifier ( DOT ^ identifier )? ( identifier ( DOT ^ ( ( KW_ELEM_TYPE )=> KW_ELEM_TYPE | ( KW_KEY_TYPE )=> KW_KEY_TYPE | ( KW_VALUE_TYPE )=> KW_VALUE_TYPE | identifier ) )* )?
			{
			root_0 = (ASTNode)adaptor.nil();


			pushFollow(FOLLOW_identifier_in_tabTypeExpr7096);
			identifier398=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) adaptor.addChild(root_0, identifier398.getTree());

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1511:17: ( DOT ^ identifier )?
			int alt109=2;
			int LA109_0 = input.LA(1);
			if ( (LA109_0==DOT) ) {
				alt109=1;
			}
			switch (alt109) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1511:18: DOT ^ identifier
					{
					DOT399=(Token)match(input,DOT,FOLLOW_DOT_in_tabTypeExpr7099); if (state.failed) return retval;
					if ( state.backtracking==0 ) {
					DOT399_tree = (ASTNode)adaptor.create(DOT399);
					root_0 = (ASTNode)adaptor.becomeRoot(DOT399_tree, root_0);
					}

					pushFollow(FOLLOW_identifier_in_tabTypeExpr7102);
					identifier400=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, identifier400.getTree());

					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1512:4: ( identifier ( DOT ^ ( ( KW_ELEM_TYPE )=> KW_ELEM_TYPE | ( KW_KEY_TYPE )=> KW_KEY_TYPE | ( KW_VALUE_TYPE )=> KW_VALUE_TYPE | identifier ) )* )?
			int alt112=2;
			int LA112_0 = input.LA(1);
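			// Editorial note: the generated predicate below (and the similar
			// ones at decisions 115 and 121 later in this file) just checks
			// whether the lookahead token can start an identifier, i.e. it is
			// Identifier or one of Hive's non-reserved keywords.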
			if ( (LA112_0==Identifier||(LA112_0 >= KW_ABORT && LA112_0 <= KW_AFTER)||LA112_0==KW_ALLOC_FRACTION||LA112_0==KW_ANALYZE||LA112_0==KW_ARCHIVE||LA112_0==KW_ASC||(LA112_0 >= KW_AUTOCOMMIT && LA112_0 <= KW_BEFORE)||(LA112_0 >= KW_BUCKET && LA112_0 <= KW_BUCKETS)||(LA112_0 >= KW_CACHE && LA112_0 <= KW_CASCADE)||LA112_0==KW_CHANGE||(LA112_0 >= KW_CHECK && LA112_0 <= KW_COLLECTION)||(LA112_0 >= KW_COLUMNS && LA112_0 <= KW_COMMENT)||(LA112_0 >= KW_COMPACT && LA112_0 <= KW_CONCATENATE)||LA112_0==KW_CONTINUE||LA112_0==KW_DATA||LA112_0==KW_DATABASES||(LA112_0 >= KW_DATETIME && LA112_0 <= KW_DBPROPERTIES)||(LA112_0 >= KW_DEFAULT && LA112_0 <= KW_DEFINED)||(LA112_0 >= KW_DELIMITED && LA112_0 <= KW_DESC)||(LA112_0 >= KW_DETAIL && LA112_0 <= KW_DISABLE)||(LA112_0 >= KW_DISTRIBUTE && LA112_0 <= KW_DO)||LA112_0==KW_DOW||(LA112_0 >= KW_DUMP && LA112_0 <= KW_ELEM_TYPE)||LA112_0==KW_ENABLE||(LA112_0 >= KW_ENFORCED && LA112_0 <= KW_ESCAPED)||LA112_0==KW_EXCLUSIVE||(LA112_0 >= KW_EXPLAIN && LA112_0 <= KW_EXPRESSION)||(LA112_0 >= KW_FIELDS && LA112_0 <= KW_FIRST)||(LA112_0 >= KW_FORMAT && LA112_0 <= KW_FORMATTED)||LA112_0==KW_FUNCTIONS||(LA112_0 >= KW_HOUR && LA112_0 <= KW_IDXPROPERTIES)||(LA112_0 >= KW_INDEX && LA112_0 <= KW_INDEXES)||(LA112_0 >= KW_INPATH && LA112_0 <= KW_INPUTFORMAT)||(LA112_0 >= KW_ISOLATION && LA112_0 <= KW_JAR)||(LA112_0 >= KW_KEY && LA112_0 <= KW_LAST)||LA112_0==KW_LEVEL||(LA112_0 >= KW_LIMIT && LA112_0 <= KW_LOAD)||(LA112_0 >= KW_LOCATION && LA112_0 <= KW_LONG)||LA112_0==KW_MANAGEMENT||(LA112_0 >= KW_MAPJOIN && LA112_0 <= KW_MATERIALIZED)||LA112_0==KW_METADATA||(LA112_0 >= KW_MINUTE && LA112_0 <= KW_MONTH)||(LA112_0 >= KW_MOVE && LA112_0 <= KW_MSCK)||(LA112_0 >= KW_NORELY && LA112_0 <= KW_NOSCAN)||LA112_0==KW_NOVALIDATE||LA112_0==KW_NULLS||LA112_0==KW_OFFSET||(LA112_0 >= KW_OPERATOR && LA112_0 <= KW_OPTION)||(LA112_0 >= KW_OUTPUTDRIVER && LA112_0 <= KW_OUTPUTFORMAT)||(LA112_0 >= KW_OVERWRITE && LA112_0 <= KW_OWNER)||(LA112_0 >= KW_PARTITIONED && LA112_0 <= KW_PATH)||(LA112_0 >= KW_PLAN && LA112_0 <= KW_POOL)||LA112_0==KW_PRINCIPALS||(LA112_0 >= KW_PURGE && LA112_0 <= KW_QUERY_PARALLELISM)||LA112_0==KW_READ||(LA112_0 >= KW_REBUILD && LA112_0 <= KW_RECORDWRITER)||(LA112_0 >= KW_RELOAD && LA112_0 <= KW_RESTRICT)||LA112_0==KW_REWRITE||(LA112_0 >= KW_ROLE && LA112_0 <= KW_ROLES)||(LA112_0 >= KW_SCHEDULING_POLICY && LA112_0 <= KW_SECOND)||(LA112_0 >= KW_SEMI && LA112_0 <= KW_SERVER)||(LA112_0 >= KW_SETS && LA112_0 <= KW_SKEWED)||(LA112_0 >= KW_SNAPSHOT && LA112_0 <= KW_SSL)||(LA112_0 >= KW_STATISTICS && LA112_0 <= KW_SUMMARY)||LA112_0==KW_TABLES||(LA112_0 >= KW_TBLPROPERTIES && LA112_0 <= KW_TERMINATED)||LA112_0==KW_TINYINT||(LA112_0 >= KW_TOUCH && LA112_0 <= KW_TRANSACTIONS)||LA112_0==KW_UNARCHIVE||LA112_0==KW_UNDO||LA112_0==KW_UNIONTYPE||(LA112_0 >= KW_UNLOCK && LA112_0 <= KW_UNSIGNED)||(LA112_0 >= KW_URI && LA112_0 <= KW_USE)||(LA112_0 >= KW_UTC && LA112_0 <= KW_VALIDATE)||LA112_0==KW_VALUE_TYPE||(LA112_0 >= KW_VECTORIZATION && LA112_0 <= KW_WEEK)||LA112_0==KW_WHILE||(LA112_0 >= KW_WORK && LA112_0 <= KW_ZONE)||LA112_0==KW_BATCH||LA112_0==KW_DAYOFWEEK||LA112_0==KW_HOLD_DDLTIME||LA112_0==KW_IGNORE||LA112_0==KW_NO_DROP||LA112_0==KW_OFFLINE||LA112_0==KW_PROTECTION||LA112_0==KW_READONLY||LA112_0==KW_TIMESTAMPTZ) ) {
				alt112=1;
			}
			switch (alt112) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1512:5: identifier ( DOT ^ ( ( KW_ELEM_TYPE )=> KW_ELEM_TYPE | ( KW_KEY_TYPE )=> KW_KEY_TYPE | ( KW_VALUE_TYPE )=> KW_VALUE_TYPE | identifier ) )*
					{
					pushFollow(FOLLOW_identifier_in_tabTypeExpr7110);
					identifier401=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, identifier401.getTree());

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1512:16: ( DOT ^ ( ( KW_ELEM_TYPE )=> KW_ELEM_TYPE | ( KW_KEY_TYPE )=> KW_KEY_TYPE | ( KW_VALUE_TYPE )=> KW_VALUE_TYPE | identifier ) )*
					loop111:
					while (true) {
						int alt111=2;
						int LA111_0 = input.LA(1);
						if ( (LA111_0==DOT) ) {
							alt111=1;
						}

						switch (alt111) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1512:17: DOT ^ ( ( KW_ELEM_TYPE )=> KW_ELEM_TYPE | ( KW_KEY_TYPE )=> KW_KEY_TYPE | ( KW_VALUE_TYPE )=> KW_VALUE_TYPE | identifier )
							{
							DOT402=(Token)match(input,DOT,FOLLOW_DOT_in_tabTypeExpr7113); if (state.failed) return retval;
							if ( state.backtracking==0 ) {
							DOT402_tree = (ASTNode)adaptor.create(DOT402);
							root_0 = (ASTNode)adaptor.becomeRoot(DOT402_tree, root_0);
							}

							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1513:4: ( ( KW_ELEM_TYPE )=> KW_ELEM_TYPE | ( KW_KEY_TYPE )=> KW_KEY_TYPE | ( KW_VALUE_TYPE )=> KW_VALUE_TYPE | identifier )
							int alt110=4;
							switch ( input.LA(1) ) {
							case KW_ELEM_TYPE:
								{
								int LA110_1 = input.LA(2);
								if ( (synpred4_HiveParser()) ) {
									alt110=1;
								}
								else if ( (true) ) {
									alt110=4;
								}

								}
								break;
							case KW_KEY_TYPE:
								{
								int LA110_2 = input.LA(2);
								if ( (synpred5_HiveParser()) ) {
									alt110=2;
								}
								else if ( (true) ) {
									alt110=4;
								}

								}
								break;
							case KW_VALUE_TYPE:
								{
								int LA110_3 = input.LA(2);
								if ( (synpred6_HiveParser()) ) {
									alt110=3;
								}
								else if ( (true) ) {
									alt110=4;
								}

								}
								break;
							case Identifier:
							case KW_ABORT:
							case KW_ACTIVATE:
							case KW_ACTIVE:
							case KW_ADD:
							case KW_ADMIN:
							case KW_AFTER:
							case KW_ALLOC_FRACTION:
							case KW_ANALYZE:
							case KW_ARCHIVE:
							case KW_ASC:
							case KW_AUTOCOMMIT:
							case KW_BEFORE:
							case KW_BUCKET:
							case KW_BUCKETS:
							case KW_CACHE:
							case KW_CASCADE:
							case KW_CHANGE:
							case KW_CHECK:
							case KW_CLUSTER:
							case KW_CLUSTERED:
							case KW_CLUSTERSTATUS:
							case KW_COLLECTION:
							case KW_COLUMNS:
							case KW_COMMENT:
							case KW_COMPACT:
							case KW_COMPACTIONS:
							case KW_COMPUTE:
							case KW_CONCATENATE:
							case KW_CONTINUE:
							case KW_DATA:
							case KW_DATABASES:
							case KW_DATETIME:
							case KW_DAY:
							case KW_DBPROPERTIES:
							case KW_DEFAULT:
							case KW_DEFERRED:
							case KW_DEFINED:
							case KW_DELIMITED:
							case KW_DEPENDENCY:
							case KW_DESC:
							case KW_DETAIL:
							case KW_DIRECTORIES:
							case KW_DIRECTORY:
							case KW_DISABLE:
							case KW_DISTRIBUTE:
							case KW_DO:
							case KW_DOW:
							case KW_DUMP:
							case KW_ENABLE:
							case KW_ENFORCED:
							case KW_ESCAPED:
							case KW_EXCLUSIVE:
							case KW_EXPLAIN:
							case KW_EXPORT:
							case KW_EXPRESSION:
							case KW_FIELDS:
							case KW_FILE:
							case KW_FILEFORMAT:
							case KW_FIRST:
							case KW_FORMAT:
							case KW_FORMATTED:
							case KW_FUNCTIONS:
							case KW_HOUR:
							case KW_IDXPROPERTIES:
							case KW_INDEX:
							case KW_INDEXES:
							case KW_INPATH:
							case KW_INPUTDRIVER:
							case KW_INPUTFORMAT:
							case KW_ISOLATION:
							case KW_ITEMS:
							case KW_JAR:
							case KW_KEY:
							case KW_KEYS:
							case KW_KILL:
							case KW_LAST:
							case KW_LEVEL:
							case KW_LIMIT:
							case KW_LINES:
							case KW_LOAD:
							case KW_LOCATION:
							case KW_LOCK:
							case KW_LOCKS:
							case KW_LOGICAL:
							case KW_LONG:
							case KW_MANAGEMENT:
							case KW_MAPJOIN:
							case KW_MAPPING:
							case KW_MATCHED:
							case KW_MATERIALIZED:
							case KW_METADATA:
							case KW_MINUTE:
							case KW_MONTH:
							case KW_MOVE:
							case KW_MSCK:
							case KW_NORELY:
							case KW_NOSCAN:
							case KW_NOVALIDATE:
							case KW_NULLS:
							case KW_OFFSET:
							case KW_OPERATOR:
							case KW_OPTION:
							case KW_OUTPUTDRIVER:
							case KW_OUTPUTFORMAT:
							case KW_OVERWRITE:
							case KW_OWNER:
							case KW_PARTITIONED:
							case KW_PARTITIONS:
							case KW_PATH:
							case KW_PLAN:
							case KW_PLANS:
							case KW_PLUS:
							case KW_POOL:
							case KW_PRINCIPALS:
							case KW_PURGE:
							case KW_QUARTER:
							case KW_QUERY:
							case KW_QUERY_PARALLELISM:
							case KW_READ:
							case KW_REBUILD:
							case KW_RECORDREADER:
							case KW_RECORDWRITER:
							case KW_RELOAD:
							case KW_RELY:
							case KW_RENAME:
							case KW_REOPTIMIZATION:
							case KW_REPAIR:
							case KW_REPL:
							case KW_REPLACE:
							case KW_REPLICATION:
							case KW_RESOURCE:
							case KW_RESTRICT:
							case KW_REWRITE:
							case KW_ROLE:
							case KW_ROLES:
							case KW_SCHEDULING_POLICY:
							case KW_SCHEMA:
							case KW_SCHEMAS:
							case KW_SECOND:
							case KW_SEMI:
							case KW_SERDE:
							case KW_SERDEPROPERTIES:
							case KW_SERVER:
							case KW_SETS:
							case KW_SHARED:
							case KW_SHOW:
							case KW_SHOW_DATABASE:
							case KW_SKEWED:
							case KW_SNAPSHOT:
							case KW_SORT:
							case KW_SORTED:
							case KW_SSL:
							case KW_STATISTICS:
							case KW_STATUS:
							case KW_STORED:
							case KW_STREAMTABLE:
							case KW_STRING:
							case KW_STRUCT:
							case KW_SUMMARY:
							case KW_TABLES:
							case KW_TBLPROPERTIES:
							case KW_TEMPORARY:
							case KW_TERMINATED:
							case KW_TINYINT:
							case KW_TOUCH:
							case KW_TRANSACTION:
							case KW_TRANSACTIONS:
							case KW_UNARCHIVE:
							case KW_UNDO:
							case KW_UNIONTYPE:
							case KW_UNLOCK:
							case KW_UNMANAGED:
							case KW_UNSET:
							case KW_UNSIGNED:
							case KW_URI:
							case KW_USE:
							case KW_UTC:
							case KW_UTCTIMESTAMP:
							case KW_VALIDATE:
							case KW_VECTORIZATION:
							case KW_VIEW:
							case KW_VIEWS:
							case KW_WAIT:
							case KW_WEEK:
							case KW_WHILE:
							case KW_WORK:
							case KW_WORKLOAD:
							case KW_WRITE:
							case KW_YEAR:
							case KW_ZONE:
							case KW_BATCH:
							case KW_DAYOFWEEK:
							case KW_HOLD_DDLTIME:
							case KW_IGNORE:
							case KW_NO_DROP:
							case KW_OFFLINE:
							case KW_PROTECTION:
							case KW_READONLY:
							case KW_TIMESTAMPTZ:
								{
								alt110=4;
								}
								break;
							default:
								if (state.backtracking>0) {state.failed=true; return retval;}
								NoViableAltException nvae =
									new NoViableAltException("", 110, 0, input);
								throw nvae;
							}
							switch (alt110) {
								case 1 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1514:4: ( KW_ELEM_TYPE )=> KW_ELEM_TYPE
									{
									KW_ELEM_TYPE403=(Token)match(input,KW_ELEM_TYPE,FOLLOW_KW_ELEM_TYPE_in_tabTypeExpr7130); if (state.failed) return retval;
									if ( state.backtracking==0 ) {
									KW_ELEM_TYPE403_tree = (ASTNode)adaptor.create(KW_ELEM_TYPE403);
									adaptor.addChild(root_0, KW_ELEM_TYPE403_tree);
									}

									}
									break;
								case 2 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1516:4: ( KW_KEY_TYPE )=> KW_KEY_TYPE
									{
									KW_KEY_TYPE404=(Token)match(input,KW_KEY_TYPE,FOLLOW_KW_KEY_TYPE_in_tabTypeExpr7147); if (state.failed) return retval;
									if ( state.backtracking==0 ) {
									KW_KEY_TYPE404_tree = (ASTNode)adaptor.create(KW_KEY_TYPE404);
									adaptor.addChild(root_0, KW_KEY_TYPE404_tree);
									}

									}
									break;
								case 3 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1518:4: ( KW_VALUE_TYPE )=> KW_VALUE_TYPE
									{
									KW_VALUE_TYPE405=(Token)match(input,KW_VALUE_TYPE,FOLLOW_KW_VALUE_TYPE_in_tabTypeExpr7164); if (state.failed) return retval;
									if ( state.backtracking==0 ) {
									KW_VALUE_TYPE405_tree = (ASTNode)adaptor.create(KW_VALUE_TYPE405);
									adaptor.addChild(root_0, KW_VALUE_TYPE405_tree);
									}

									}
									break;
								case 4 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1519:6: identifier
									{
									pushFollow(FOLLOW_identifier_in_tabTypeExpr7172);
									identifier406=identifier();
									state._fsp--;
									if (state.failed) return retval;
									if ( state.backtracking==0 ) adaptor.addChild(root_0, identifier406.getTree());

									}
									break;

							}

							}
							break;

						default :
							break loop111;
						}
					}

					}
					break;

			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "tabTypeExpr"
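	/*
	 * Editorial note: tabTypeExpr accepts a dotted name whose later
	 * components may also be the collection pseudo-type accessors matched by
	 * KW_ELEM_TYPE, KW_KEY_TYPE and KW_VALUE_TYPE; the synpred4-6 syntactic
	 * predicates above disambiguate those keywords from ordinary identifiers
	 * by backtracking. Each DOT becomes the new root via becomeRoot, so a
	 * hypothetical "a.b.c" parses left-associatively as (. (. a b) c).
	 */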


	public static class partTypeExpr_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "partTypeExpr"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1524:1: partTypeExpr : tabTypeExpr ( partitionSpec )? -> ^( TOK_TABTYPE tabTypeExpr ( partitionSpec )? ) ;
	public final HiveParser.partTypeExpr_return partTypeExpr() throws RecognitionException {
		HiveParser.partTypeExpr_return retval = new HiveParser.partTypeExpr_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope tabTypeExpr407 =null;
		ParserRuleReturnScope partitionSpec408 =null;

		RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
		RewriteRuleSubtreeStream stream_tabTypeExpr=new RewriteRuleSubtreeStream(adaptor,"rule tabTypeExpr");

		 pushMsg("specifying table partitions", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1527:5: ( tabTypeExpr ( partitionSpec )? -> ^( TOK_TABTYPE tabTypeExpr ( partitionSpec )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1527:8: tabTypeExpr ( partitionSpec )?
			{
			pushFollow(FOLLOW_tabTypeExpr_in_partTypeExpr7212);
			tabTypeExpr407=tabTypeExpr();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tabTypeExpr.add(tabTypeExpr407.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1527:20: ( partitionSpec )?
			int alt113=2;
			int LA113_0 = input.LA(1);
			if ( (LA113_0==KW_PARTITION) ) {
				alt113=1;
			}
			switch (alt113) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1527:20: partitionSpec
					{
					pushFollow(FOLLOW_partitionSpec_in_partTypeExpr7214);
					partitionSpec408=partitionSpec();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_partitionSpec.add(partitionSpec408.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: partitionSpec, tabTypeExpr
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1527:35: -> ^( TOK_TABTYPE tabTypeExpr ( partitionSpec )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1527:38: ^( TOK_TABTYPE tabTypeExpr ( partitionSpec )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABTYPE, "TOK_TABTYPE"), root_1);
				adaptor.addChild(root_1, stream_tabTypeExpr.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1527:64: ( partitionSpec )?
				if ( stream_partitionSpec.hasNext() ) {
					adaptor.addChild(root_1, stream_partitionSpec.nextTree());
				}
				stream_partitionSpec.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "partTypeExpr"
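	/*
	 * Editorial note: partTypeExpr simply wraps the preceding tabTypeExpr and
	 * an optional partitionSpec under one TOK_TABTYPE node, so a hypothetical
	 * "tbl PARTITION (ds='2018-05-18')" yields
	 *
	 *   TOK_TABTYPE
	 *     tbl
	 *     TOK_PARTSPEC ...   // subtree produced by partitionSpec, present
	 *                        // only when the PARTITION clause was given
	 */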


	public static class tabPartColTypeExpr_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tabPartColTypeExpr"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1530:1: tabPartColTypeExpr : tableName ( partitionSpec )? ( extColumnName )? -> ^( TOK_TABTYPE tableName ( partitionSpec )? ( extColumnName )? ) ;
	public final HiveParser.tabPartColTypeExpr_return tabPartColTypeExpr() throws RecognitionException {
		HiveParser.tabPartColTypeExpr_return retval = new HiveParser.tabPartColTypeExpr_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope tableName409 =null;
		ParserRuleReturnScope partitionSpec410 =null;
		ParserRuleReturnScope extColumnName411 =null;

		RewriteRuleSubtreeStream stream_extColumnName=new RewriteRuleSubtreeStream(adaptor,"rule extColumnName");
		RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		 pushMsg("specifying table partitions columnName", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1533:5: ( tableName ( partitionSpec )? ( extColumnName )? -> ^( TOK_TABTYPE tableName ( partitionSpec )? ( extColumnName )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1533:8: tableName ( partitionSpec )? ( extColumnName )?
			{
			pushFollow(FOLLOW_tableName_in_tabPartColTypeExpr7254);
			tableName409=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(tableName409.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1533:18: ( partitionSpec )?
			int alt114=2;
			int LA114_0 = input.LA(1);
			if ( (LA114_0==KW_PARTITION) ) {
				alt114=1;
			}
			switch (alt114) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1533:18: partitionSpec
					{
					pushFollow(FOLLOW_partitionSpec_in_tabPartColTypeExpr7256);
					partitionSpec410=partitionSpec();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_partitionSpec.add(partitionSpec410.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1533:33: ( extColumnName )?
			int alt115=2;
			int LA115_0 = input.LA(1);
			if ( (LA115_0==Identifier||(LA115_0 >= KW_ABORT && LA115_0 <= KW_AFTER)||LA115_0==KW_ALLOC_FRACTION||LA115_0==KW_ANALYZE||LA115_0==KW_ARCHIVE||LA115_0==KW_ASC||(LA115_0 >= KW_AUTOCOMMIT && LA115_0 <= KW_BEFORE)||(LA115_0 >= KW_BUCKET && LA115_0 <= KW_BUCKETS)||(LA115_0 >= KW_CACHE && LA115_0 <= KW_CASCADE)||LA115_0==KW_CHANGE||(LA115_0 >= KW_CHECK && LA115_0 <= KW_COLLECTION)||(LA115_0 >= KW_COLUMNS && LA115_0 <= KW_COMMENT)||(LA115_0 >= KW_COMPACT && LA115_0 <= KW_CONCATENATE)||LA115_0==KW_CONTINUE||LA115_0==KW_DATA||LA115_0==KW_DATABASES||(LA115_0 >= KW_DATETIME && LA115_0 <= KW_DBPROPERTIES)||(LA115_0 >= KW_DEFAULT && LA115_0 <= KW_DEFINED)||(LA115_0 >= KW_DELIMITED && LA115_0 <= KW_DESC)||(LA115_0 >= KW_DETAIL && LA115_0 <= KW_DISABLE)||(LA115_0 >= KW_DISTRIBUTE && LA115_0 <= KW_DO)||LA115_0==KW_DOW||(LA115_0 >= KW_DUMP && LA115_0 <= KW_ELEM_TYPE)||LA115_0==KW_ENABLE||(LA115_0 >= KW_ENFORCED && LA115_0 <= KW_ESCAPED)||LA115_0==KW_EXCLUSIVE||(LA115_0 >= KW_EXPLAIN && LA115_0 <= KW_EXPRESSION)||(LA115_0 >= KW_FIELDS && LA115_0 <= KW_FIRST)||(LA115_0 >= KW_FORMAT && LA115_0 <= KW_FORMATTED)||LA115_0==KW_FUNCTIONS||(LA115_0 >= KW_HOUR && LA115_0 <= KW_IDXPROPERTIES)||(LA115_0 >= KW_INDEX && LA115_0 <= KW_INDEXES)||(LA115_0 >= KW_INPATH && LA115_0 <= KW_INPUTFORMAT)||(LA115_0 >= KW_ISOLATION && LA115_0 <= KW_JAR)||(LA115_0 >= KW_KEY && LA115_0 <= KW_LAST)||LA115_0==KW_LEVEL||(LA115_0 >= KW_LIMIT && LA115_0 <= KW_LOAD)||(LA115_0 >= KW_LOCATION && LA115_0 <= KW_LONG)||LA115_0==KW_MANAGEMENT||(LA115_0 >= KW_MAPJOIN && LA115_0 <= KW_MATERIALIZED)||LA115_0==KW_METADATA||(LA115_0 >= KW_MINUTE && LA115_0 <= KW_MONTH)||(LA115_0 >= KW_MOVE && LA115_0 <= KW_MSCK)||(LA115_0 >= KW_NORELY && LA115_0 <= KW_NOSCAN)||LA115_0==KW_NOVALIDATE||LA115_0==KW_NULLS||LA115_0==KW_OFFSET||(LA115_0 >= KW_OPERATOR && LA115_0 <= KW_OPTION)||(LA115_0 >= KW_OUTPUTDRIVER && LA115_0 <= KW_OUTPUTFORMAT)||(LA115_0 >= KW_OVERWRITE && LA115_0 <= KW_OWNER)||(LA115_0 >= KW_PARTITIONED && LA115_0 <= KW_PATH)||(LA115_0 >= KW_PLAN && LA115_0 <= KW_POOL)||LA115_0==KW_PRINCIPALS||(LA115_0 >= KW_PURGE && LA115_0 <= KW_QUERY_PARALLELISM)||LA115_0==KW_READ||(LA115_0 >= KW_REBUILD && LA115_0 <= KW_RECORDWRITER)||(LA115_0 >= KW_RELOAD && LA115_0 <= KW_RESTRICT)||LA115_0==KW_REWRITE||(LA115_0 >= KW_ROLE && LA115_0 <= KW_ROLES)||(LA115_0 >= KW_SCHEDULING_POLICY && LA115_0 <= KW_SECOND)||(LA115_0 >= KW_SEMI && LA115_0 <= KW_SERVER)||(LA115_0 >= KW_SETS && LA115_0 <= KW_SKEWED)||(LA115_0 >= KW_SNAPSHOT && LA115_0 <= KW_SSL)||(LA115_0 >= KW_STATISTICS && LA115_0 <= KW_SUMMARY)||LA115_0==KW_TABLES||(LA115_0 >= KW_TBLPROPERTIES && LA115_0 <= KW_TERMINATED)||LA115_0==KW_TINYINT||(LA115_0 >= KW_TOUCH && LA115_0 <= KW_TRANSACTIONS)||LA115_0==KW_UNARCHIVE||LA115_0==KW_UNDO||LA115_0==KW_UNIONTYPE||(LA115_0 >= KW_UNLOCK && LA115_0 <= KW_UNSIGNED)||(LA115_0 >= KW_URI && LA115_0 <= KW_USE)||(LA115_0 >= KW_UTC && LA115_0 <= KW_VALIDATE)||LA115_0==KW_VALUE_TYPE||(LA115_0 >= KW_VECTORIZATION && LA115_0 <= KW_WEEK)||LA115_0==KW_WHILE||(LA115_0 >= KW_WORK && LA115_0 <= KW_ZONE)||LA115_0==KW_BATCH||LA115_0==KW_DAYOFWEEK||LA115_0==KW_HOLD_DDLTIME||LA115_0==KW_IGNORE||LA115_0==KW_NO_DROP||LA115_0==KW_OFFLINE||LA115_0==KW_PROTECTION||LA115_0==KW_READONLY||LA115_0==KW_TIMESTAMPTZ) ) {
				alt115=1;
			}
			switch (alt115) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1533:33: extColumnName
					{
					pushFollow(FOLLOW_extColumnName_in_tabPartColTypeExpr7259);
					extColumnName411=extColumnName();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_extColumnName.add(extColumnName411.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: extColumnName, partitionSpec, tableName
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1533:48: -> ^( TOK_TABTYPE tableName ( partitionSpec )? ( extColumnName )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1533:51: ^( TOK_TABTYPE tableName ( partitionSpec )? ( extColumnName )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABTYPE, "TOK_TABTYPE"), root_1);
				adaptor.addChild(root_1, stream_tableName.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1533:75: ( partitionSpec )?
				if ( stream_partitionSpec.hasNext() ) {
					adaptor.addChild(root_1, stream_partitionSpec.nextTree());
				}
				stream_partitionSpec.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1533:90: ( extColumnName )?
				if ( stream_extColumnName.hasNext() ) {
					adaptor.addChild(root_1, stream_extColumnName.nextTree());
				}
				stream_extColumnName.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "tabPartColTypeExpr"
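	/*
	 * Editorial note: tabPartColTypeExpr is the operand shape used by the
	 * DESCRIBE statement below: a tableName, an optional partitionSpec and an
	 * optional extColumnName, all rewritten under a single TOK_TABTYPE root.
	 * The optional children are emitted only when the corresponding rewrite
	 * stream has collected a subtree (the hasNext() checks above).
	 */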


	public static class descStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "descStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1536:1: descStatement : ( KW_DESCRIBE | KW_DESC ) ( ( KW_DATABASE | KW_SCHEMA )=> ( KW_DATABASE | KW_SCHEMA ) ( KW_EXTENDED )? (dbName= identifier ) -> ^( TOK_DESCDATABASE $dbName ( KW_EXTENDED )? ) | ( KW_FUNCTION )=> KW_FUNCTION ( KW_EXTENDED )? (name= descFuncNames ) -> ^( TOK_DESCFUNCTION $name ( KW_EXTENDED )? ) | ( KW_FORMATTED | KW_EXTENDED )=> ( (descOptions= KW_FORMATTED |descOptions= KW_EXTENDED ) parttype= tabPartColTypeExpr ) -> ^( TOK_DESCTABLE $parttype $descOptions) |parttype= tabPartColTypeExpr -> ^( TOK_DESCTABLE $parttype) ) ;
	public final HiveParser.descStatement_return descStatement() throws RecognitionException {
		HiveParser.descStatement_return retval = new HiveParser.descStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token descOptions=null;
		Token KW_DESCRIBE412=null;
		Token KW_DESC413=null;
		Token KW_DATABASE414=null;
		Token KW_SCHEMA415=null;
		Token KW_EXTENDED416=null;
		Token KW_FUNCTION417=null;
		Token KW_EXTENDED418=null;
		ParserRuleReturnScope dbName =null;
		ParserRuleReturnScope name =null;
		ParserRuleReturnScope parttype =null;

		ASTNode descOptions_tree=null;
		ASTNode KW_DESCRIBE412_tree=null;
		ASTNode KW_DESC413_tree=null;
		ASTNode KW_DATABASE414_tree=null;
		ASTNode KW_SCHEMA415_tree=null;
		ASTNode KW_EXTENDED416_tree=null;
		ASTNode KW_FUNCTION417_tree=null;
		ASTNode KW_EXTENDED418_tree=null;
		RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
		RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
		RewriteRuleTokenStream stream_KW_EXTENDED=new RewriteRuleTokenStream(adaptor,"token KW_EXTENDED");
		RewriteRuleTokenStream stream_KW_DESC=new RewriteRuleTokenStream(adaptor,"token KW_DESC");
		RewriteRuleTokenStream stream_KW_FUNCTION=new RewriteRuleTokenStream(adaptor,"token KW_FUNCTION");
		RewriteRuleTokenStream stream_KW_FORMATTED=new RewriteRuleTokenStream(adaptor,"token KW_FORMATTED");
		RewriteRuleTokenStream stream_KW_DESCRIBE=new RewriteRuleTokenStream(adaptor,"token KW_DESCRIBE");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_tabPartColTypeExpr=new RewriteRuleSubtreeStream(adaptor,"rule tabPartColTypeExpr");
		RewriteRuleSubtreeStream stream_descFuncNames=new RewriteRuleSubtreeStream(adaptor,"rule descFuncNames");

		 pushMsg("describe statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1539:5: ( ( KW_DESCRIBE | KW_DESC ) ( ( KW_DATABASE | KW_SCHEMA )=> ( KW_DATABASE | KW_SCHEMA ) ( KW_EXTENDED )? (dbName= identifier ) -> ^( TOK_DESCDATABASE $dbName ( KW_EXTENDED )? ) | ( KW_FUNCTION )=> KW_FUNCTION ( KW_EXTENDED )? (name= descFuncNames ) -> ^( TOK_DESCFUNCTION $name ( KW_EXTENDED )? ) | ( KW_FORMATTED | KW_EXTENDED )=> ( (descOptions= KW_FORMATTED |descOptions= KW_EXTENDED ) parttype= tabPartColTypeExpr ) -> ^( TOK_DESCTABLE $parttype $descOptions) |parttype= tabPartColTypeExpr -> ^( TOK_DESCTABLE $parttype) ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1540:5: ( KW_DESCRIBE | KW_DESC ) ( ( KW_DATABASE | KW_SCHEMA )=> ( KW_DATABASE | KW_SCHEMA ) ( KW_EXTENDED )? (dbName= identifier ) -> ^( TOK_DESCDATABASE $dbName ( KW_EXTENDED )? ) | ( KW_FUNCTION )=> KW_FUNCTION ( KW_EXTENDED )? (name= descFuncNames ) -> ^( TOK_DESCFUNCTION $name ( KW_EXTENDED )? ) | ( KW_FORMATTED | KW_EXTENDED )=> ( (descOptions= KW_FORMATTED |descOptions= KW_EXTENDED ) parttype= tabPartColTypeExpr ) -> ^( TOK_DESCTABLE $parttype $descOptions) |parttype= tabPartColTypeExpr -> ^( TOK_DESCTABLE $parttype) )
			{
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1540:5: ( KW_DESCRIBE | KW_DESC )
			int alt116=2;
			int LA116_0 = input.LA(1);
			if ( (LA116_0==KW_DESCRIBE) ) {
				alt116=1;
			}
			else if ( (LA116_0==KW_DESC) ) {
				alt116=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 116, 0, input);
				throw nvae;
			}

			switch (alt116) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1540:6: KW_DESCRIBE
					{
					KW_DESCRIBE412=(Token)match(input,KW_DESCRIBE,FOLLOW_KW_DESCRIBE_in_descStatement7306); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DESCRIBE.add(KW_DESCRIBE412);

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1540:18: KW_DESC
					{
					KW_DESC413=(Token)match(input,KW_DESC,FOLLOW_KW_DESC_in_descStatement7308); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DESC.add(KW_DESC413);

					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1541:5: ( ( KW_DATABASE | KW_SCHEMA )=> ( KW_DATABASE | KW_SCHEMA ) ( KW_EXTENDED )? (dbName= identifier ) -> ^( TOK_DESCDATABASE $dbName ( KW_EXTENDED )? ) | ( KW_FUNCTION )=> KW_FUNCTION ( KW_EXTENDED )? (name= descFuncNames ) -> ^( TOK_DESCFUNCTION $name ( KW_EXTENDED )? ) | ( KW_FORMATTED | KW_EXTENDED )=> ( (descOptions= KW_FORMATTED |descOptions= KW_EXTENDED ) parttype= tabPartColTypeExpr ) -> ^( TOK_DESCTABLE $parttype $descOptions) |parttype= tabPartColTypeExpr -> ^( TOK_DESCTABLE $parttype) )
			int alt121=4;
			int LA121_0 = input.LA(1);
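			// Editorial note: decision 121 selects among the four DESCRIBE
			// forms -- 1: DATABASE|SCHEMA, 2: FUNCTION, 3: FORMATTED|EXTENDED
			// before a table expression, 4: a bare tabPartColTypeExpr. Since
			// SCHEMA and FORMATTED are also valid identifiers, the generated
			// code resolves them with extra lookahead plus the synpred7-9
			// backtracking probes.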
			if ( (LA121_0==KW_DATABASE) && (synpred7_HiveParser())) {
				alt121=1;
			}
			else if ( (LA121_0==KW_SCHEMA) ) {
				int LA121_2 = input.LA(2);
				if ( (LA121_2==KW_EXTENDED) && (synpred7_HiveParser())) {
					alt121=1;
				}
				else if ( (LA121_2==Identifier) ) {
					int LA121_9 = input.LA(3);
					if ( (synpred7_HiveParser()) ) {
						alt121=1;
					}
					else if ( (true) ) {
						alt121=4;
					}

				}
				else if ( ((LA121_2 >= KW_ABORT && LA121_2 <= KW_AFTER)||LA121_2==KW_ALLOC_FRACTION||LA121_2==KW_ANALYZE||LA121_2==KW_ARCHIVE||LA121_2==KW_ASC||(LA121_2 >= KW_AUTOCOMMIT && LA121_2 <= KW_BEFORE)||(LA121_2 >= KW_BUCKET && LA121_2 <= KW_BUCKETS)||(LA121_2 >= KW_CACHE && LA121_2 <= KW_CASCADE)||LA121_2==KW_CHANGE||(LA121_2 >= KW_CHECK && LA121_2 <= KW_COLLECTION)||(LA121_2 >= KW_COLUMNS && LA121_2 <= KW_COMMENT)||(LA121_2 >= KW_COMPACT && LA121_2 <= KW_CONCATENATE)||LA121_2==KW_CONTINUE||LA121_2==KW_DATA||LA121_2==KW_DATABASES||(LA121_2 >= KW_DATETIME && LA121_2 <= KW_DBPROPERTIES)||(LA121_2 >= KW_DEFAULT && LA121_2 <= KW_DEFINED)||(LA121_2 >= KW_DELIMITED && LA121_2 <= KW_DESC)||(LA121_2 >= KW_DETAIL && LA121_2 <= KW_DISABLE)||(LA121_2 >= KW_DISTRIBUTE && LA121_2 <= KW_DO)||LA121_2==KW_DOW||(LA121_2 >= KW_DUMP && LA121_2 <= KW_ELEM_TYPE)||LA121_2==KW_ENABLE||(LA121_2 >= KW_ENFORCED && LA121_2 <= KW_ESCAPED)||LA121_2==KW_EXCLUSIVE||(LA121_2 >= KW_EXPLAIN && LA121_2 <= KW_EXPRESSION)||(LA121_2 >= KW_FIELDS && LA121_2 <= KW_FIRST)||(LA121_2 >= KW_FORMAT && LA121_2 <= KW_FORMATTED)||LA121_2==KW_FUNCTIONS||(LA121_2 >= KW_HOUR && LA121_2 <= KW_IDXPROPERTIES)||(LA121_2 >= KW_INDEX && LA121_2 <= KW_INDEXES)||(LA121_2 >= KW_INPATH && LA121_2 <= KW_INPUTFORMAT)||(LA121_2 >= KW_ISOLATION && LA121_2 <= KW_JAR)||(LA121_2 >= KW_KEY && LA121_2 <= KW_LAST)||LA121_2==KW_LEVEL||(LA121_2 >= KW_LIMIT && LA121_2 <= KW_LOAD)||(LA121_2 >= KW_LOCATION && LA121_2 <= KW_LONG)||LA121_2==KW_MANAGEMENT||(LA121_2 >= KW_MAPJOIN && LA121_2 <= KW_MATERIALIZED)||LA121_2==KW_METADATA||(LA121_2 >= KW_MINUTE && LA121_2 <= KW_MONTH)||(LA121_2 >= KW_MOVE && LA121_2 <= KW_MSCK)||(LA121_2 >= KW_NORELY && LA121_2 <= KW_NOSCAN)||LA121_2==KW_NOVALIDATE||LA121_2==KW_NULLS||LA121_2==KW_OFFSET||(LA121_2 >= KW_OPERATOR && LA121_2 <= KW_OPTION)||(LA121_2 >= KW_OUTPUTDRIVER && LA121_2 <= KW_OUTPUTFORMAT)||(LA121_2 >= KW_OVERWRITE && LA121_2 <= KW_OWNER)||(LA121_2 >= KW_PARTITIONED && LA121_2 <= KW_PATH)||(LA121_2 >= KW_PLAN && LA121_2 <= KW_POOL)||LA121_2==KW_PRINCIPALS||(LA121_2 >= KW_PURGE && LA121_2 <= KW_QUERY_PARALLELISM)||LA121_2==KW_READ||(LA121_2 >= KW_REBUILD && LA121_2 <= KW_RECORDWRITER)||(LA121_2 >= KW_RELOAD && LA121_2 <= KW_RESTRICT)||LA121_2==KW_REWRITE||(LA121_2 >= KW_ROLE && LA121_2 <= KW_ROLES)||(LA121_2 >= KW_SCHEDULING_POLICY && LA121_2 <= KW_SECOND)||(LA121_2 >= KW_SEMI && LA121_2 <= KW_SERVER)||(LA121_2 >= KW_SETS && LA121_2 <= KW_SKEWED)||(LA121_2 >= KW_SNAPSHOT && LA121_2 <= KW_SSL)||(LA121_2 >= KW_STATISTICS && LA121_2 <= KW_SUMMARY)||LA121_2==KW_TABLES||(LA121_2 >= KW_TBLPROPERTIES && LA121_2 <= KW_TERMINATED)||LA121_2==KW_TINYINT||(LA121_2 >= KW_TOUCH && LA121_2 <= KW_TRANSACTIONS)||LA121_2==KW_UNARCHIVE||LA121_2==KW_UNDO||LA121_2==KW_UNIONTYPE||(LA121_2 >= KW_UNLOCK && LA121_2 <= KW_UNSIGNED)||(LA121_2 >= KW_URI && LA121_2 <= KW_USE)||(LA121_2 >= KW_UTC && LA121_2 <= KW_VALIDATE)||LA121_2==KW_VALUE_TYPE||(LA121_2 >= KW_VECTORIZATION && LA121_2 <= KW_WEEK)||LA121_2==KW_WHILE||(LA121_2 >= KW_WORK && LA121_2 <= KW_ZONE)||LA121_2==KW_BATCH||LA121_2==KW_DAYOFWEEK||LA121_2==KW_HOLD_DDLTIME||LA121_2==KW_IGNORE||LA121_2==KW_NO_DROP||LA121_2==KW_OFFLINE||LA121_2==KW_PROTECTION||LA121_2==KW_READONLY||LA121_2==KW_TIMESTAMPTZ) ) {
					int LA121_10 = input.LA(3);
					if ( (synpred7_HiveParser()) ) {
						alt121=1;
					}
					else if ( (true) ) {
						alt121=4;
					}

				}
				else if ( (LA121_2==EOF||LA121_2==DOT||LA121_2==KW_PARTITION) ) {
					alt121=4;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 121, 2, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

			}
			else if ( (LA121_0==KW_FUNCTION) && (synpred8_HiveParser())) {
				alt121=2;
			}
			else if ( (LA121_0==KW_FORMATTED) ) {
				switch ( input.LA(2) ) {
				case Identifier:
					{
					int LA121_14 = input.LA(3);
					if ( (synpred9_HiveParser()) ) {
						alt121=3;
					}
					else if ( (true) ) {
						alt121=4;
					}

					}
					break;
				case KW_ABORT:
				case KW_ACTIVATE:
				case KW_ACTIVE:
				case KW_ADD:
				case KW_ADMIN:
				case KW_AFTER:
				case KW_ALLOC_FRACTION:
				case KW_ANALYZE:
				case KW_ARCHIVE:
				case KW_ASC:
				case KW_AUTOCOMMIT:
				case KW_BEFORE:
				case KW_BUCKET:
				case KW_BUCKETS:
				case KW_CACHE:
				case KW_CASCADE:
				case KW_CHANGE:
				case KW_CHECK:
				case KW_CLUSTER:
				case KW_CLUSTERED:
				case KW_CLUSTERSTATUS:
				case KW_COLLECTION:
				case KW_COLUMNS:
				case KW_COMMENT:
				case KW_COMPACT:
				case KW_COMPACTIONS:
				case KW_COMPUTE:
				case KW_CONCATENATE:
				case KW_CONTINUE:
				case KW_DATA:
				case KW_DATABASES:
				case KW_DATETIME:
				case KW_DAY:
				case KW_DBPROPERTIES:
				case KW_DEFAULT:
				case KW_DEFERRED:
				case KW_DEFINED:
				case KW_DELIMITED:
				case KW_DEPENDENCY:
				case KW_DESC:
				case KW_DETAIL:
				case KW_DIRECTORIES:
				case KW_DIRECTORY:
				case KW_DISABLE:
				case KW_DISTRIBUTE:
				case KW_DO:
				case KW_DOW:
				case KW_DUMP:
				case KW_ELEM_TYPE:
				case KW_ENABLE:
				case KW_ENFORCED:
				case KW_ESCAPED:
				case KW_EXCLUSIVE:
				case KW_EXPLAIN:
				case KW_EXPORT:
				case KW_EXPRESSION:
				case KW_FIELDS:
				case KW_FILE:
				case KW_FILEFORMAT:
				case KW_FIRST:
				case KW_FORMAT:
				case KW_FORMATTED:
				case KW_FUNCTIONS:
				case KW_HOUR:
				case KW_IDXPROPERTIES:
				case KW_INDEX:
				case KW_INDEXES:
				case KW_INPATH:
				case KW_INPUTDRIVER:
				case KW_INPUTFORMAT:
				case KW_ISOLATION:
				case KW_ITEMS:
				case KW_JAR:
				case KW_KEY:
				case KW_KEYS:
				case KW_KEY_TYPE:
				case KW_KILL:
				case KW_LAST:
				case KW_LEVEL:
				case KW_LIMIT:
				case KW_LINES:
				case KW_LOAD:
				case KW_LOCATION:
				case KW_LOCK:
				case KW_LOCKS:
				case KW_LOGICAL:
				case KW_LONG:
				case KW_MANAGEMENT:
				case KW_MAPJOIN:
				case KW_MAPPING:
				case KW_MATCHED:
				case KW_MATERIALIZED:
				case KW_METADATA:
				case KW_MINUTE:
				case KW_MONTH:
				case KW_MOVE:
				case KW_MSCK:
				case KW_NORELY:
				case KW_NOSCAN:
				case KW_NOVALIDATE:
				case KW_NULLS:
				case KW_OFFSET:
				case KW_OPERATOR:
				case KW_OPTION:
				case KW_OUTPUTDRIVER:
				case KW_OUTPUTFORMAT:
				case KW_OVERWRITE:
				case KW_OWNER:
				case KW_PARTITIONED:
				case KW_PARTITIONS:
				case KW_PATH:
				case KW_PLAN:
				case KW_PLANS:
				case KW_PLUS:
				case KW_POOL:
				case KW_PRINCIPALS:
				case KW_PURGE:
				case KW_QUARTER:
				case KW_QUERY:
				case KW_QUERY_PARALLELISM:
				case KW_READ:
				case KW_REBUILD:
				case KW_RECORDREADER:
				case KW_RECORDWRITER:
				case KW_RELOAD:
				case KW_RELY:
				case KW_RENAME:
				case KW_REOPTIMIZATION:
				case KW_REPAIR:
				case KW_REPL:
				case KW_REPLACE:
				case KW_REPLICATION:
				case KW_RESOURCE:
				case KW_RESTRICT:
				case KW_REWRITE:
				case KW_ROLE:
				case KW_ROLES:
				case KW_SCHEDULING_POLICY:
				case KW_SCHEMA:
				case KW_SCHEMAS:
				case KW_SECOND:
				case KW_SEMI:
				case KW_SERDE:
				case KW_SERDEPROPERTIES:
				case KW_SERVER:
				case KW_SETS:
				case KW_SHARED:
				case KW_SHOW:
				case KW_SHOW_DATABASE:
				case KW_SKEWED:
				case KW_SNAPSHOT:
				case KW_SORT:
				case KW_SORTED:
				case KW_SSL:
				case KW_STATISTICS:
				case KW_STATUS:
				case KW_STORED:
				case KW_STREAMTABLE:
				case KW_STRING:
				case KW_STRUCT:
				case KW_SUMMARY:
				case KW_TABLES:
				case KW_TBLPROPERTIES:
				case KW_TEMPORARY:
				case KW_TERMINATED:
				case KW_TINYINT:
				case KW_TOUCH:
				case KW_TRANSACTION:
				case KW_TRANSACTIONS:
				case KW_UNARCHIVE:
				case KW_UNDO:
				case KW_UNIONTYPE:
				case KW_UNLOCK:
				case KW_UNMANAGED:
				case KW_UNSET:
				case KW_UNSIGNED:
				case KW_URI:
				case KW_USE:
				case KW_UTC:
				case KW_UTCTIMESTAMP:
				case KW_VALIDATE:
				case KW_VALUE_TYPE:
				case KW_VECTORIZATION:
				case KW_VIEW:
				case KW_VIEWS:
				case KW_WAIT:
				case KW_WEEK:
				case KW_WHILE:
				case KW_WORK:
				case KW_WORKLOAD:
				case KW_WRITE:
				case KW_YEAR:
				case KW_ZONE:
				case KW_BATCH:
				case KW_DAYOFWEEK:
				case KW_HOLD_DDLTIME:
				case KW_IGNORE:
				case KW_NO_DROP:
				case KW_OFFLINE:
				case KW_PROTECTION:
				case KW_READONLY:
				case KW_TIMESTAMPTZ:
					{
					int LA121_15 = input.LA(3);
					if ( (synpred9_HiveParser()) ) {
						alt121=3;
					}
					else if ( (true) ) {
						alt121=4;
					}

					}
					break;
				case EOF:
				case DOT:
				case KW_PARTITION:
					{
					alt121=4;
					}
					break;
				default:
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 121, 4, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}
			}
			else if ( (LA121_0==KW_EXTENDED) && (synpred9_HiveParser())) {
				alt121=3;
			}
			else if ( (LA121_0==Identifier||(LA121_0 >= KW_ABORT && LA121_0 <= KW_AFTER)||LA121_0==KW_ALLOC_FRACTION||LA121_0==KW_ANALYZE||LA121_0==KW_ARCHIVE||LA121_0==KW_ASC||(LA121_0 >= KW_AUTOCOMMIT && LA121_0 <= KW_BEFORE)||(LA121_0 >= KW_BUCKET && LA121_0 <= KW_BUCKETS)||(LA121_0 >= KW_CACHE && LA121_0 <= KW_CASCADE)||LA121_0==KW_CHANGE||(LA121_0 >= KW_CHECK && LA121_0 <= KW_COLLECTION)||(LA121_0 >= KW_COLUMNS && LA121_0 <= KW_COMMENT)||(LA121_0 >= KW_COMPACT && LA121_0 <= KW_CONCATENATE)||LA121_0==KW_CONTINUE||LA121_0==KW_DATA||LA121_0==KW_DATABASES||(LA121_0 >= KW_DATETIME && LA121_0 <= KW_DBPROPERTIES)||(LA121_0 >= KW_DEFAULT && LA121_0 <= KW_DEFINED)||(LA121_0 >= KW_DELIMITED && LA121_0 <= KW_DESC)||(LA121_0 >= KW_DETAIL && LA121_0 <= KW_DISABLE)||(LA121_0 >= KW_DISTRIBUTE && LA121_0 <= KW_DO)||LA121_0==KW_DOW||(LA121_0 >= KW_DUMP && LA121_0 <= KW_ELEM_TYPE)||LA121_0==KW_ENABLE||(LA121_0 >= KW_ENFORCED && LA121_0 <= KW_ESCAPED)||LA121_0==KW_EXCLUSIVE||(LA121_0 >= KW_EXPLAIN && LA121_0 <= KW_EXPRESSION)||(LA121_0 >= KW_FIELDS && LA121_0 <= KW_FIRST)||LA121_0==KW_FORMAT||LA121_0==KW_FUNCTIONS||(LA121_0 >= KW_HOUR && LA121_0 <= KW_IDXPROPERTIES)||(LA121_0 >= KW_INDEX && LA121_0 <= KW_INDEXES)||(LA121_0 >= KW_INPATH && LA121_0 <= KW_INPUTFORMAT)||(LA121_0 >= KW_ISOLATION && LA121_0 <= KW_JAR)||(LA121_0 >= KW_KEY && LA121_0 <= KW_LAST)||LA121_0==KW_LEVEL||(LA121_0 >= KW_LIMIT && LA121_0 <= KW_LOAD)||(LA121_0 >= KW_LOCATION && LA121_0 <= KW_LONG)||LA121_0==KW_MANAGEMENT||(LA121_0 >= KW_MAPJOIN && LA121_0 <= KW_MATERIALIZED)||LA121_0==KW_METADATA||(LA121_0 >= KW_MINUTE && LA121_0 <= KW_MONTH)||(LA121_0 >= KW_MOVE && LA121_0 <= KW_MSCK)||(LA121_0 >= KW_NORELY && LA121_0 <= KW_NOSCAN)||LA121_0==KW_NOVALIDATE||LA121_0==KW_NULLS||LA121_0==KW_OFFSET||(LA121_0 >= KW_OPERATOR && LA121_0 <= KW_OPTION)||(LA121_0 >= KW_OUTPUTDRIVER && LA121_0 <= KW_OUTPUTFORMAT)||(LA121_0 >= KW_OVERWRITE && LA121_0 <= KW_OWNER)||(LA121_0 >= KW_PARTITIONED && LA121_0 <= KW_PATH)||(LA121_0 >= KW_PLAN && LA121_0 <= KW_POOL)||LA121_0==KW_PRINCIPALS||(LA121_0 >= KW_PURGE && LA121_0 <= KW_QUERY_PARALLELISM)||LA121_0==KW_READ||(LA121_0 >= KW_REBUILD && LA121_0 <= KW_RECORDWRITER)||(LA121_0 >= KW_RELOAD && LA121_0 <= KW_RESTRICT)||LA121_0==KW_REWRITE||(LA121_0 >= KW_ROLE && LA121_0 <= KW_ROLES)||LA121_0==KW_SCHEDULING_POLICY||(LA121_0 >= KW_SCHEMAS && LA121_0 <= KW_SECOND)||(LA121_0 >= KW_SEMI && LA121_0 <= KW_SERVER)||(LA121_0 >= KW_SETS && LA121_0 <= KW_SKEWED)||(LA121_0 >= KW_SNAPSHOT && LA121_0 <= KW_SSL)||(LA121_0 >= KW_STATISTICS && LA121_0 <= KW_SUMMARY)||LA121_0==KW_TABLES||(LA121_0 >= KW_TBLPROPERTIES && LA121_0 <= KW_TERMINATED)||LA121_0==KW_TINYINT||(LA121_0 >= KW_TOUCH && LA121_0 <= KW_TRANSACTIONS)||LA121_0==KW_UNARCHIVE||LA121_0==KW_UNDO||LA121_0==KW_UNIONTYPE||(LA121_0 >= KW_UNLOCK && LA121_0 <= KW_UNSIGNED)||(LA121_0 >= KW_URI && LA121_0 <= KW_USE)||(LA121_0 >= KW_UTC && LA121_0 <= KW_VALIDATE)||LA121_0==KW_VALUE_TYPE||(LA121_0 >= KW_VECTORIZATION && LA121_0 <= KW_WEEK)||LA121_0==KW_WHILE||(LA121_0 >= KW_WORK && LA121_0 <= KW_ZONE)||LA121_0==KW_BATCH||LA121_0==KW_DAYOFWEEK||LA121_0==KW_HOLD_DDLTIME||LA121_0==KW_IGNORE||LA121_0==KW_NO_DROP||LA121_0==KW_OFFLINE||LA121_0==KW_PROTECTION||LA121_0==KW_READONLY||LA121_0==KW_TIMESTAMPTZ) ) {
				alt121=4;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 121, 0, input);
				throw nvae;
			}

			switch (alt121) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1542:5: ( KW_DATABASE | KW_SCHEMA )=> ( KW_DATABASE | KW_SCHEMA ) ( KW_EXTENDED )? (dbName= identifier )
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1542:32: ( KW_DATABASE | KW_SCHEMA )
					int alt117=2;
					int LA117_0 = input.LA(1);
					if ( (LA117_0==KW_DATABASE) ) {
						alt117=1;
					}
					else if ( (LA117_0==KW_SCHEMA) ) {
						alt117=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						NoViableAltException nvae =
							new NoViableAltException("", 117, 0, input);
						throw nvae;
					}

					switch (alt117) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1542:33: KW_DATABASE
							{
							KW_DATABASE414=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_descStatement7330); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_DATABASE.add(KW_DATABASE414);

							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1542:45: KW_SCHEMA
							{
							KW_SCHEMA415=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_descStatement7332); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_SCHEMA.add(KW_SCHEMA415);

							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1542:56: ( KW_EXTENDED )?
					int alt118=2;
					int LA118_0 = input.LA(1);
					if ( (LA118_0==KW_EXTENDED) ) {
						alt118=1;
					}
					switch (alt118) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1542:56: KW_EXTENDED
							{
							KW_EXTENDED416=(Token)match(input,KW_EXTENDED,FOLLOW_KW_EXTENDED_in_descStatement7335); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_EXTENDED.add(KW_EXTENDED416);

							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1542:69: (dbName= identifier )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1542:70: dbName= identifier
					{
					pushFollow(FOLLOW_identifier_in_descStatement7341);
					dbName=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(dbName.getTree());
					}

					// AST REWRITE
					// elements: KW_EXTENDED, dbName
					// token labels: 
					// rule labels: dbName, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_dbName=new RewriteRuleSubtreeStream(adaptor,"rule dbName",dbName!=null?dbName.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1542:89: -> ^( TOK_DESCDATABASE $dbName ( KW_EXTENDED )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1542:92: ^( TOK_DESCDATABASE $dbName ( KW_EXTENDED )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DESCDATABASE, "TOK_DESCDATABASE"), root_1);
						adaptor.addChild(root_1, stream_dbName.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1542:119: ( KW_EXTENDED )?
						if ( stream_KW_EXTENDED.hasNext() ) {
							adaptor.addChild(root_1, stream_KW_EXTENDED.nextNode());
						}
						stream_KW_EXTENDED.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1544:5: ( KW_FUNCTION )=> KW_FUNCTION ( KW_EXTENDED )? (name= descFuncNames )
					{
					KW_FUNCTION417=(Token)match(input,KW_FUNCTION,FOLLOW_KW_FUNCTION_in_descStatement7372); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_FUNCTION.add(KW_FUNCTION417);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1544:34: ( KW_EXTENDED )?
					int alt119=2;
					int LA119_0 = input.LA(1);
					if ( (LA119_0==KW_EXTENDED) ) {
						alt119=1;
					}
					switch (alt119) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1544:34: KW_EXTENDED
							{
							KW_EXTENDED418=(Token)match(input,KW_EXTENDED,FOLLOW_KW_EXTENDED_in_descStatement7374); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_EXTENDED.add(KW_EXTENDED418);

							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1544:47: (name= descFuncNames )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1544:48: name= descFuncNames
					{
					pushFollow(FOLLOW_descFuncNames_in_descStatement7380);
					name=descFuncNames();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_descFuncNames.add(name.getTree());
					}

					// AST REWRITE
					// elements: KW_EXTENDED, name
					// token labels: 
					// rule labels: name, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_name=new RewriteRuleSubtreeStream(adaptor,"rule name",name!=null?name.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1544:68: -> ^( TOK_DESCFUNCTION $name ( KW_EXTENDED )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1544:71: ^( TOK_DESCFUNCTION $name ( KW_EXTENDED )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DESCFUNCTION, "TOK_DESCFUNCTION"), root_1);
						adaptor.addChild(root_1, stream_name.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1544:96: ( KW_EXTENDED )?
						if ( stream_KW_EXTENDED.hasNext() ) {
							adaptor.addChild(root_1, stream_KW_EXTENDED.nextNode());
						}
						stream_KW_EXTENDED.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1546:5: ( KW_FORMATTED | KW_EXTENDED )=> ( (descOptions= KW_FORMATTED |descOptions= KW_EXTENDED ) parttype= tabPartColTypeExpr )
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1546:35: ( (descOptions= KW_FORMATTED |descOptions= KW_EXTENDED ) parttype= tabPartColTypeExpr )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1546:36: (descOptions= KW_FORMATTED |descOptions= KW_EXTENDED ) parttype= tabPartColTypeExpr
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1546:36: (descOptions= KW_FORMATTED |descOptions= KW_EXTENDED )
					int alt120=2;
					int LA120_0 = input.LA(1);
					if ( (LA120_0==KW_FORMATTED) ) {
						alt120=1;
					}
					else if ( (LA120_0==KW_EXTENDED) ) {
						alt120=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						NoViableAltException nvae =
							new NoViableAltException("", 120, 0, input);
						throw nvae;
					}

					switch (alt120) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1546:37: descOptions= KW_FORMATTED
							{
							descOptions=(Token)match(input,KW_FORMATTED,FOLLOW_KW_FORMATTED_in_descStatement7417); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_FORMATTED.add(descOptions);

							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1546:62: descOptions= KW_EXTENDED
							{
							descOptions=(Token)match(input,KW_EXTENDED,FOLLOW_KW_EXTENDED_in_descStatement7421); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_EXTENDED.add(descOptions);

							}
							break;

					}

					pushFollow(FOLLOW_tabPartColTypeExpr_in_descStatement7426);
					parttype=tabPartColTypeExpr();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tabPartColTypeExpr.add(parttype.getTree());
					}

					// AST REWRITE
					// elements: descOptions, parttype
					// token labels: descOptions
					// rule labels: parttype, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleTokenStream stream_descOptions=new RewriteRuleTokenStream(adaptor,"token descOptions",descOptions);
					RewriteRuleSubtreeStream stream_parttype=new RewriteRuleSubtreeStream(adaptor,"rule parttype",parttype!=null?parttype.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1546:116: -> ^( TOK_DESCTABLE $parttype $descOptions)
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1546:119: ^( TOK_DESCTABLE $parttype $descOptions)
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DESCTABLE, "TOK_DESCTABLE"), root_1);
						adaptor.addChild(root_1, stream_parttype.nextTree());
						adaptor.addChild(root_1, stream_descOptions.nextNode());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1548:5: parttype= tabPartColTypeExpr
					{
					pushFollow(FOLLOW_tabPartColTypeExpr_in_descStatement7453);
					parttype=tabPartColTypeExpr();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tabPartColTypeExpr.add(parttype.getTree());
					// AST REWRITE
					// elements: parttype
					// token labels: 
					// rule labels: parttype, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_parttype=new RewriteRuleSubtreeStream(adaptor,"rule parttype",parttype!=null?parttype.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1548:33: -> ^( TOK_DESCTABLE $parttype)
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1548:36: ^( TOK_DESCTABLE $parttype)
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DESCTABLE, "TOK_DESCTABLE"), root_1);
						adaptor.addChild(root_1, stream_parttype.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "descStatement"
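	/*
	 * Editorial note: a minimal, hypothetical sketch of how the rules above
	 * are normally reached. Callers usually do not construct HiveParser
	 * directly but go through ParseDriver, which sets up the lexer and token
	 * stream (API details vary across Hive versions; treat this as an
	 * illustration, not the canonical entry point):
	 *
	 *   import org.apache.hadoop.hive.ql.parse.ASTNode;
	 *   import org.apache.hadoop.hive.ql.parse.ParseDriver;
	 *
	 *   ParseDriver pd = new ParseDriver();
	 *   ASTNode ast = pd.parse("DESCRIBE FORMATTED default.src");
	 *   System.out.println(ast.dump()); // expect TOK_DESCTABLE over TOK_TABTYPE
	 */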


	public static class analyzeStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "analyzeStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1552:1: analyzeStatement : KW_ANALYZE KW_TABLE (parttype= tableOrPartition ) ( ( KW_COMPUTE )=> KW_COMPUTE KW_STATISTICS ( (noscan= KW_NOSCAN ) | ( KW_FOR KW_COLUMNS (statsColumnName= columnNameList )? ) )? -> ^( TOK_ANALYZE $parttype ( $noscan)? ( KW_COLUMNS )? ( $statsColumnName)? ) | ( KW_CACHE )=> KW_CACHE KW_METADATA -> ^( TOK_CACHE_METADATA $parttype) ) ;
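	/*
	 * Editorial note: per the rule header above, ANALYZE TABLE takes one of
	 * two suffixes, e.g. (hypothetical inputs)
	 *
	 *   ANALYZE TABLE t PARTITION (ds) COMPUTE STATISTICS FOR COLUMNS c1
	 *     -> ^(TOK_ANALYZE ...)
	 *   ANALYZE TABLE t CACHE METADATA
	 *     -> ^(TOK_CACHE_METADATA ...)
	 *
	 * The choice is driven by the synpred10/synpred11 predicates on
	 * KW_COMPUTE and KW_CACHE (decision 124 below).
	 */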
	public final HiveParser.analyzeStatement_return analyzeStatement() throws RecognitionException {
		HiveParser.analyzeStatement_return retval = new HiveParser.analyzeStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token noscan=null;
		Token KW_ANALYZE419=null;
		Token KW_TABLE420=null;
		Token KW_COMPUTE421=null;
		Token KW_STATISTICS422=null;
		Token KW_FOR423=null;
		Token KW_COLUMNS424=null;
		Token KW_CACHE425=null;
		Token KW_METADATA426=null;
		ParserRuleReturnScope parttype =null;
		ParserRuleReturnScope statsColumnName =null;

		ASTNode noscan_tree=null;
		ASTNode KW_ANALYZE419_tree=null;
		ASTNode KW_TABLE420_tree=null;
		ASTNode KW_COMPUTE421_tree=null;
		ASTNode KW_STATISTICS422_tree=null;
		ASTNode KW_FOR423_tree=null;
		ASTNode KW_COLUMNS424_tree=null;
		ASTNode KW_CACHE425_tree=null;
		ASTNode KW_METADATA426_tree=null;
		RewriteRuleTokenStream stream_KW_STATISTICS=new RewriteRuleTokenStream(adaptor,"token KW_STATISTICS");
		RewriteRuleTokenStream stream_KW_ANALYZE=new RewriteRuleTokenStream(adaptor,"token KW_ANALYZE");
		RewriteRuleTokenStream stream_KW_COLUMNS=new RewriteRuleTokenStream(adaptor,"token KW_COLUMNS");
		RewriteRuleTokenStream stream_KW_FOR=new RewriteRuleTokenStream(adaptor,"token KW_FOR");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleTokenStream stream_KW_COMPUTE=new RewriteRuleTokenStream(adaptor,"token KW_COMPUTE");
		RewriteRuleTokenStream stream_KW_METADATA=new RewriteRuleTokenStream(adaptor,"token KW_METADATA");
		RewriteRuleTokenStream stream_KW_NOSCAN=new RewriteRuleTokenStream(adaptor,"token KW_NOSCAN");
		RewriteRuleTokenStream stream_KW_CACHE=new RewriteRuleTokenStream(adaptor,"token KW_CACHE");
		RewriteRuleSubtreeStream stream_tableOrPartition=new RewriteRuleSubtreeStream(adaptor,"rule tableOrPartition");
		RewriteRuleSubtreeStream stream_columnNameList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameList");

		 pushMsg("analyze statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1555:5: ( KW_ANALYZE KW_TABLE (parttype= tableOrPartition ) ( ( KW_COMPUTE )=> KW_COMPUTE KW_STATISTICS ( (noscan= KW_NOSCAN ) | ( KW_FOR KW_COLUMNS (statsColumnName= columnNameList )? ) )? -> ^( TOK_ANALYZE $parttype ( $noscan)? ( KW_COLUMNS )? ( $statsColumnName)? ) | ( KW_CACHE )=> KW_CACHE KW_METADATA -> ^( TOK_CACHE_METADATA $parttype) ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1555:7: KW_ANALYZE KW_TABLE (parttype= tableOrPartition ) ( ( KW_COMPUTE )=> KW_COMPUTE KW_STATISTICS ( (noscan= KW_NOSCAN ) | ( KW_FOR KW_COLUMNS (statsColumnName= columnNameList )? ) )? -> ^( TOK_ANALYZE $parttype ( $noscan)? ( KW_COLUMNS )? ( $statsColumnName)? ) | ( KW_CACHE )=> KW_CACHE KW_METADATA -> ^( TOK_CACHE_METADATA $parttype) )
			{
			KW_ANALYZE419=(Token)match(input,KW_ANALYZE,FOLLOW_KW_ANALYZE_in_analyzeStatement7495); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ANALYZE.add(KW_ANALYZE419);

			KW_TABLE420=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_analyzeStatement7497); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE420);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1555:27: (parttype= tableOrPartition )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1555:28: parttype= tableOrPartition
			{
			pushFollow(FOLLOW_tableOrPartition_in_analyzeStatement7502);
			parttype=tableOrPartition();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableOrPartition.add(parttype.getTree());
			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1556:7: ( ( KW_COMPUTE )=> KW_COMPUTE KW_STATISTICS ( (noscan= KW_NOSCAN ) | ( KW_FOR KW_COLUMNS (statsColumnName= columnNameList )? ) )? -> ^( TOK_ANALYZE $parttype ( $noscan)? ( KW_COLUMNS )? ( $statsColumnName)? ) | ( KW_CACHE )=> KW_CACHE KW_METADATA -> ^( TOK_CACHE_METADATA $parttype) )
			int alt124=2;
			int LA124_0 = input.LA(1);
			if ( (LA124_0==KW_COMPUTE) && (synpred10_HiveParser())) {
				alt124=1;
			}
			else if ( (LA124_0==KW_CACHE) && (synpred11_HiveParser())) {
				alt124=2;
			}
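			// Dispatch between the two gated alternatives: the syntactic
			// predicates synpred10/synpred11 confirm the KW_COMPUTE and
			// KW_CACHE branches before committing, mirroring the
			// ( KW_COMPUTE )=> and ( KW_CACHE )=> gates in the grammar above.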

			switch (alt124) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1557:7: ( KW_COMPUTE )=> KW_COMPUTE KW_STATISTICS ( (noscan= KW_NOSCAN ) | ( KW_FOR KW_COLUMNS (statsColumnName= columnNameList )? ) )?
					{
					KW_COMPUTE421=(Token)match(input,KW_COMPUTE,FOLLOW_KW_COMPUTE_in_analyzeStatement7525); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_COMPUTE.add(KW_COMPUTE421);

					KW_STATISTICS422=(Token)match(input,KW_STATISTICS,FOLLOW_KW_STATISTICS_in_analyzeStatement7527); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_STATISTICS.add(KW_STATISTICS422);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1557:48: ( (noscan= KW_NOSCAN ) | ( KW_FOR KW_COLUMNS (statsColumnName= columnNameList )? ) )?
					int alt123=3;
					int LA123_0 = input.LA(1);
					if ( (LA123_0==KW_NOSCAN) ) {
						alt123=1;
					}
					else if ( (LA123_0==KW_FOR) ) {
						alt123=2;
					}
					switch (alt123) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1557:49: (noscan= KW_NOSCAN )
							{
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1557:49: (noscan= KW_NOSCAN )
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1557:50: noscan= KW_NOSCAN
							{
							noscan=(Token)match(input,KW_NOSCAN,FOLLOW_KW_NOSCAN_in_analyzeStatement7533); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_NOSCAN.add(noscan);

							}

							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1558:57: ( KW_FOR KW_COLUMNS (statsColumnName= columnNameList )? )
							{
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1558:57: ( KW_FOR KW_COLUMNS (statsColumnName= columnNameList )? )
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1558:58: KW_FOR KW_COLUMNS (statsColumnName= columnNameList )?
							{
							KW_FOR423=(Token)match(input,KW_FOR,FOLLOW_KW_FOR_in_analyzeStatement7593); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_FOR.add(KW_FOR423);

							KW_COLUMNS424=(Token)match(input,KW_COLUMNS,FOLLOW_KW_COLUMNS_in_analyzeStatement7595); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_COLUMNS.add(KW_COLUMNS424);

							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1558:76: (statsColumnName= columnNameList )?
							int alt122=2;
							int LA122_0 = input.LA(1);
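							// The generated condition below is a FIRST-set
							// membership test: it asks whether LA(1) can begin a
							// columnNameList (an Identifier or one of the many
							// non-reserved keywords usable as identifiers), i.e.
							// whether the optional statsColumnName list follows.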
							if ( (LA122_0==Identifier||(LA122_0 >= KW_ABORT && LA122_0 <= KW_AFTER)||LA122_0==KW_ALLOC_FRACTION||LA122_0==KW_ANALYZE||LA122_0==KW_ARCHIVE||LA122_0==KW_ASC||(LA122_0 >= KW_AUTOCOMMIT && LA122_0 <= KW_BEFORE)||(LA122_0 >= KW_BUCKET && LA122_0 <= KW_BUCKETS)||(LA122_0 >= KW_CACHE && LA122_0 <= KW_CASCADE)||LA122_0==KW_CHANGE||(LA122_0 >= KW_CHECK && LA122_0 <= KW_COLLECTION)||(LA122_0 >= KW_COLUMNS && LA122_0 <= KW_COMMENT)||(LA122_0 >= KW_COMPACT && LA122_0 <= KW_CONCATENATE)||LA122_0==KW_CONTINUE||LA122_0==KW_DATA||LA122_0==KW_DATABASES||(LA122_0 >= KW_DATETIME && LA122_0 <= KW_DBPROPERTIES)||(LA122_0 >= KW_DEFAULT && LA122_0 <= KW_DEFINED)||(LA122_0 >= KW_DELIMITED && LA122_0 <= KW_DESC)||(LA122_0 >= KW_DETAIL && LA122_0 <= KW_DISABLE)||(LA122_0 >= KW_DISTRIBUTE && LA122_0 <= KW_DO)||LA122_0==KW_DOW||(LA122_0 >= KW_DUMP && LA122_0 <= KW_ELEM_TYPE)||LA122_0==KW_ENABLE||(LA122_0 >= KW_ENFORCED && LA122_0 <= KW_ESCAPED)||LA122_0==KW_EXCLUSIVE||(LA122_0 >= KW_EXPLAIN && LA122_0 <= KW_EXPRESSION)||(LA122_0 >= KW_FIELDS && LA122_0 <= KW_FIRST)||(LA122_0 >= KW_FORMAT && LA122_0 <= KW_FORMATTED)||LA122_0==KW_FUNCTIONS||(LA122_0 >= KW_HOUR && LA122_0 <= KW_IDXPROPERTIES)||(LA122_0 >= KW_INDEX && LA122_0 <= KW_INDEXES)||(LA122_0 >= KW_INPATH && LA122_0 <= KW_INPUTFORMAT)||(LA122_0 >= KW_ISOLATION && LA122_0 <= KW_JAR)||(LA122_0 >= KW_KEY && LA122_0 <= KW_LAST)||LA122_0==KW_LEVEL||(LA122_0 >= KW_LIMIT && LA122_0 <= KW_LOAD)||(LA122_0 >= KW_LOCATION && LA122_0 <= KW_LONG)||LA122_0==KW_MANAGEMENT||(LA122_0 >= KW_MAPJOIN && LA122_0 <= KW_MATERIALIZED)||LA122_0==KW_METADATA||(LA122_0 >= KW_MINUTE && LA122_0 <= KW_MONTH)||(LA122_0 >= KW_MOVE && LA122_0 <= KW_MSCK)||(LA122_0 >= KW_NORELY && LA122_0 <= KW_NOSCAN)||LA122_0==KW_NOVALIDATE||LA122_0==KW_NULLS||LA122_0==KW_OFFSET||(LA122_0 >= KW_OPERATOR && LA122_0 <= KW_OPTION)||(LA122_0 >= KW_OUTPUTDRIVER && LA122_0 <= KW_OUTPUTFORMAT)||(LA122_0 >= KW_OVERWRITE && LA122_0 <= KW_OWNER)||(LA122_0 >= KW_PARTITIONED && LA122_0 <= KW_PATH)||(LA122_0 >= KW_PLAN && LA122_0 <= KW_POOL)||LA122_0==KW_PRINCIPALS||(LA122_0 >= KW_PURGE && LA122_0 <= KW_QUERY_PARALLELISM)||LA122_0==KW_READ||(LA122_0 >= KW_REBUILD && LA122_0 <= KW_RECORDWRITER)||(LA122_0 >= KW_RELOAD && LA122_0 <= KW_RESTRICT)||LA122_0==KW_REWRITE||(LA122_0 >= KW_ROLE && LA122_0 <= KW_ROLES)||(LA122_0 >= KW_SCHEDULING_POLICY && LA122_0 <= KW_SECOND)||(LA122_0 >= KW_SEMI && LA122_0 <= KW_SERVER)||(LA122_0 >= KW_SETS && LA122_0 <= KW_SKEWED)||(LA122_0 >= KW_SNAPSHOT && LA122_0 <= KW_SSL)||(LA122_0 >= KW_STATISTICS && LA122_0 <= KW_SUMMARY)||LA122_0==KW_TABLES||(LA122_0 >= KW_TBLPROPERTIES && LA122_0 <= KW_TERMINATED)||LA122_0==KW_TINYINT||(LA122_0 >= KW_TOUCH && LA122_0 <= KW_TRANSACTIONS)||LA122_0==KW_UNARCHIVE||LA122_0==KW_UNDO||LA122_0==KW_UNIONTYPE||(LA122_0 >= KW_UNLOCK && LA122_0 <= KW_UNSIGNED)||(LA122_0 >= KW_URI && LA122_0 <= KW_USE)||(LA122_0 >= KW_UTC && LA122_0 <= KW_VALIDATE)||LA122_0==KW_VALUE_TYPE||(LA122_0 >= KW_VECTORIZATION && LA122_0 <= KW_WEEK)||LA122_0==KW_WHILE||(LA122_0 >= KW_WORK && LA122_0 <= KW_ZONE)||LA122_0==KW_BATCH||LA122_0==KW_DAYOFWEEK||LA122_0==KW_HOLD_DDLTIME||LA122_0==KW_IGNORE||LA122_0==KW_NO_DROP||LA122_0==KW_OFFLINE||LA122_0==KW_PROTECTION||LA122_0==KW_READONLY||LA122_0==KW_TIMESTAMPTZ) ) {
								alt122=1;
							}
							switch (alt122) {
								case 1 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1558:77: statsColumnName= columnNameList
									{
									pushFollow(FOLLOW_columnNameList_in_analyzeStatement7600);
									statsColumnName=columnNameList();
									state._fsp--;
									if (state.failed) return retval;
									if ( state.backtracking==0 ) stream_columnNameList.add(statsColumnName.getTree());
									}
									break;

							}

							}

							}
							break;

					}

					// AST REWRITE
					// elements: noscan, KW_COLUMNS, parttype, statsColumnName
					// token labels: noscan
					// rule labels: statsColumnName, parttype, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleTokenStream stream_noscan=new RewriteRuleTokenStream(adaptor,"token noscan",noscan);
					RewriteRuleSubtreeStream stream_statsColumnName=new RewriteRuleSubtreeStream(adaptor,"rule statsColumnName",statsColumnName!=null?statsColumnName.getTree():null);
					RewriteRuleSubtreeStream stream_parttype=new RewriteRuleSubtreeStream(adaptor,"rule parttype",parttype!=null?parttype.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1559:7: -> ^( TOK_ANALYZE $parttype ( $noscan)? ( KW_COLUMNS )? ( $statsColumnName)? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1559:10: ^( TOK_ANALYZE $parttype ( $noscan)? ( KW_COLUMNS )? ( $statsColumnName)? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ANALYZE, "TOK_ANALYZE"), root_1);
						adaptor.addChild(root_1, stream_parttype.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1559:35: ( $noscan)?
						if ( stream_noscan.hasNext() ) {
							adaptor.addChild(root_1, stream_noscan.nextNode());
						}
						stream_noscan.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1559:43: ( KW_COLUMNS )?
						if ( stream_KW_COLUMNS.hasNext() ) {
							adaptor.addChild(root_1, stream_KW_COLUMNS.nextNode());
						}
						stream_KW_COLUMNS.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1559:56: ( $statsColumnName)?
						if ( stream_statsColumnName.hasNext() ) {
							adaptor.addChild(root_1, stream_statsColumnName.nextTree());
						}
						stream_statsColumnName.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1561:7: ( KW_CACHE )=> KW_CACHE KW_METADATA
					{
					KW_CACHE425=(Token)match(input,KW_CACHE,FOLLOW_KW_CACHE_in_analyzeStatement7653); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_CACHE.add(KW_CACHE425);

					KW_METADATA426=(Token)match(input,KW_METADATA,FOLLOW_KW_METADATA_in_analyzeStatement7655); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_METADATA.add(KW_METADATA426);

					// AST REWRITE
					// elements: parttype
					// token labels: 
					// rule labels: parttype, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_parttype=new RewriteRuleSubtreeStream(adaptor,"rule parttype",parttype!=null?parttype.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1561:42: -> ^( TOK_CACHE_METADATA $parttype)
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1561:45: ^( TOK_CACHE_METADATA $parttype)
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CACHE_METADATA, "TOK_CACHE_METADATA"), root_1);
						adaptor.addChild(root_1, stream_parttype.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "analyzeStatement"


	public static class showStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "showStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1565:1: showStatement : ( KW_SHOW ( KW_DATABASES | KW_SCHEMAS ) ( KW_LIKE showStmtIdentifier )? -> ^( TOK_SHOWDATABASES ( showStmtIdentifier )? ) | KW_SHOW KW_TABLES ( ( KW_FROM | KW_IN ) db_name= identifier )? ( KW_LIKE showStmtIdentifier | showStmtIdentifier )? -> ^( TOK_SHOWTABLES ( TOK_FROM $db_name)? ( showStmtIdentifier )? ) | KW_SHOW KW_VIEWS ( ( KW_FROM | KW_IN ) db_name= identifier )? ( KW_LIKE showStmtIdentifier | showStmtIdentifier )? -> ^( TOK_SHOWVIEWS ( TOK_FROM $db_name)? ( showStmtIdentifier )? ) | KW_SHOW KW_MATERIALIZED KW_VIEWS ( ( KW_FROM | KW_IN ) db_name= identifier )? ( KW_LIKE showStmtIdentifier | showStmtIdentifier )? -> ^( TOK_SHOWMATERIALIZEDVIEWS ( TOK_FROM $db_name)? ( showStmtIdentifier )? ) | KW_SHOW KW_COLUMNS ( KW_FROM | KW_IN ) tableName ( ( KW_FROM | KW_IN ) db_name= identifier )? ( KW_LIKE showStmtIdentifier | showStmtIdentifier )? -> ^( TOK_SHOWCOLUMNS tableName ( TOK_FROM $db_name)? ( showStmtIdentifier )? ) | KW_SHOW KW_FUNCTIONS ( KW_LIKE showFunctionIdentifier | showFunctionIdentifier )? -> ^( TOK_SHOWFUNCTIONS ( KW_LIKE )? ( showFunctionIdentifier )? ) | KW_SHOW KW_PARTITIONS tabName= tableName ( partitionSpec )? -> ^( TOK_SHOWPARTITIONS $tabName ( partitionSpec )? ) | KW_SHOW KW_CREATE ( ( KW_DATABASE | KW_SCHEMA )=> ( KW_DATABASE | KW_SCHEMA ) db_name= identifier -> ^( TOK_SHOW_CREATEDATABASE $db_name) | KW_TABLE tabName= tableName -> ^( TOK_SHOW_CREATETABLE $tabName) ) | KW_SHOW KW_TABLE KW_EXTENDED ( ( KW_FROM | KW_IN ) db_name= identifier )? KW_LIKE showStmtIdentifier ( partitionSpec )? -> ^( TOK_SHOW_TABLESTATUS showStmtIdentifier ( $db_name)? ( partitionSpec )? ) | KW_SHOW KW_TBLPROPERTIES tableName ( LPAREN prptyName= StringLiteral RPAREN )? -> ^( TOK_SHOW_TBLPROPERTIES tableName ( $prptyName)? ) | KW_SHOW KW_LOCKS ( ( KW_DATABASE | KW_SCHEMA )=> ( KW_DATABASE | KW_SCHEMA ) (dbName= identifier ) (isExtended= KW_EXTENDED )? -> ^( TOK_SHOWDBLOCKS $dbName ( $isExtended)? ) | (parttype= partTypeExpr )? (isExtended= KW_EXTENDED )? -> ^( TOK_SHOWLOCKS ( $parttype)? ( $isExtended)? ) ) | KW_SHOW KW_COMPACTIONS -> ^( TOK_SHOW_COMPACTIONS ) | KW_SHOW KW_TRANSACTIONS -> ^( TOK_SHOW_TRANSACTIONS ) | KW_SHOW KW_CONF StringLiteral -> ^( TOK_SHOWCONF StringLiteral ) | KW_SHOW KW_RESOURCE ( ( KW_PLAN rp_name= identifier -> ^( TOK_SHOW_RP $rp_name) ) | ( KW_PLANS -> ^( TOK_SHOW_RP ) ) ) );
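	/*
	 * Illustrative inputs for the alternatives below (a sketch of the tree
	 * shapes, not an exhaustive mapping):
	 *   SHOW DATABASES LIKE 'db*'          -> ^( TOK_SHOWDATABASES 'db*' )
	 *   SHOW TABLES IN db LIKE 'page*'     -> ^( TOK_SHOWTABLES TOK_FROM db 'page*' )
	 *   SHOW COLUMNS FROM page_view IN db  -> ^( TOK_SHOWCOLUMNS page_view TOK_FROM db )
	 *   SHOW COMPACTIONS                   -> ^( TOK_SHOW_COMPACTIONS )
	 *   SHOW RESOURCE PLANS                -> ^( TOK_SHOW_RP )
	 */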
	public final HiveParser.showStatement_return showStatement() throws RecognitionException {
		HiveParser.showStatement_return retval = new HiveParser.showStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token prptyName=null;
		Token isExtended=null;
		Token KW_SHOW427=null;
		Token KW_DATABASES428=null;
		Token KW_SCHEMAS429=null;
		Token KW_LIKE430=null;
		Token KW_SHOW432=null;
		Token KW_TABLES433=null;
		Token KW_FROM434=null;
		Token KW_IN435=null;
		Token KW_LIKE436=null;
		Token KW_SHOW439=null;
		Token KW_VIEWS440=null;
		Token KW_FROM441=null;
		Token KW_IN442=null;
		Token KW_LIKE443=null;
		Token KW_SHOW446=null;
		Token KW_MATERIALIZED447=null;
		Token KW_VIEWS448=null;
		Token KW_FROM449=null;
		Token KW_IN450=null;
		Token KW_LIKE451=null;
		Token KW_SHOW454=null;
		Token KW_COLUMNS455=null;
		Token KW_FROM456=null;
		Token KW_IN457=null;
		Token KW_FROM459=null;
		Token KW_IN460=null;
		Token KW_LIKE461=null;
		Token KW_SHOW464=null;
		Token KW_FUNCTIONS465=null;
		Token KW_LIKE466=null;
		Token KW_SHOW469=null;
		Token KW_PARTITIONS470=null;
		Token KW_SHOW472=null;
		Token KW_CREATE473=null;
		Token KW_DATABASE474=null;
		Token KW_SCHEMA475=null;
		Token KW_TABLE476=null;
		Token KW_SHOW477=null;
		Token KW_TABLE478=null;
		Token KW_EXTENDED479=null;
		Token KW_FROM480=null;
		Token KW_IN481=null;
		Token KW_LIKE482=null;
		Token KW_SHOW485=null;
		Token KW_TBLPROPERTIES486=null;
		Token LPAREN488=null;
		Token RPAREN489=null;
		Token KW_SHOW490=null;
		Token KW_LOCKS491=null;
		Token KW_DATABASE492=null;
		Token KW_SCHEMA493=null;
		Token KW_SHOW494=null;
		Token KW_COMPACTIONS495=null;
		Token KW_SHOW496=null;
		Token KW_TRANSACTIONS497=null;
		Token KW_SHOW498=null;
		Token KW_CONF499=null;
		Token StringLiteral500=null;
		Token KW_SHOW501=null;
		Token KW_RESOURCE502=null;
		Token KW_PLAN503=null;
		Token KW_PLANS504=null;
		ParserRuleReturnScope db_name =null;
		ParserRuleReturnScope tabName =null;
		ParserRuleReturnScope dbName =null;
		ParserRuleReturnScope parttype =null;
		ParserRuleReturnScope rp_name =null;
		ParserRuleReturnScope showStmtIdentifier431 =null;
		ParserRuleReturnScope showStmtIdentifier437 =null;
		ParserRuleReturnScope showStmtIdentifier438 =null;
		ParserRuleReturnScope showStmtIdentifier444 =null;
		ParserRuleReturnScope showStmtIdentifier445 =null;
		ParserRuleReturnScope showStmtIdentifier452 =null;
		ParserRuleReturnScope showStmtIdentifier453 =null;
		ParserRuleReturnScope tableName458 =null;
		ParserRuleReturnScope showStmtIdentifier462 =null;
		ParserRuleReturnScope showStmtIdentifier463 =null;
		ParserRuleReturnScope showFunctionIdentifier467 =null;
		ParserRuleReturnScope showFunctionIdentifier468 =null;
		ParserRuleReturnScope partitionSpec471 =null;
		ParserRuleReturnScope showStmtIdentifier483 =null;
		ParserRuleReturnScope partitionSpec484 =null;
		ParserRuleReturnScope tableName487 =null;

		ASTNode prptyName_tree=null;
		ASTNode isExtended_tree=null;
		ASTNode KW_SHOW427_tree=null;
		ASTNode KW_DATABASES428_tree=null;
		ASTNode KW_SCHEMAS429_tree=null;
		ASTNode KW_LIKE430_tree=null;
		ASTNode KW_SHOW432_tree=null;
		ASTNode KW_TABLES433_tree=null;
		ASTNode KW_FROM434_tree=null;
		ASTNode KW_IN435_tree=null;
		ASTNode KW_LIKE436_tree=null;
		ASTNode KW_SHOW439_tree=null;
		ASTNode KW_VIEWS440_tree=null;
		ASTNode KW_FROM441_tree=null;
		ASTNode KW_IN442_tree=null;
		ASTNode KW_LIKE443_tree=null;
		ASTNode KW_SHOW446_tree=null;
		ASTNode KW_MATERIALIZED447_tree=null;
		ASTNode KW_VIEWS448_tree=null;
		ASTNode KW_FROM449_tree=null;
		ASTNode KW_IN450_tree=null;
		ASTNode KW_LIKE451_tree=null;
		ASTNode KW_SHOW454_tree=null;
		ASTNode KW_COLUMNS455_tree=null;
		ASTNode KW_FROM456_tree=null;
		ASTNode KW_IN457_tree=null;
		ASTNode KW_FROM459_tree=null;
		ASTNode KW_IN460_tree=null;
		ASTNode KW_LIKE461_tree=null;
		ASTNode KW_SHOW464_tree=null;
		ASTNode KW_FUNCTIONS465_tree=null;
		ASTNode KW_LIKE466_tree=null;
		ASTNode KW_SHOW469_tree=null;
		ASTNode KW_PARTITIONS470_tree=null;
		ASTNode KW_SHOW472_tree=null;
		ASTNode KW_CREATE473_tree=null;
		ASTNode KW_DATABASE474_tree=null;
		ASTNode KW_SCHEMA475_tree=null;
		ASTNode KW_TABLE476_tree=null;
		ASTNode KW_SHOW477_tree=null;
		ASTNode KW_TABLE478_tree=null;
		ASTNode KW_EXTENDED479_tree=null;
		ASTNode KW_FROM480_tree=null;
		ASTNode KW_IN481_tree=null;
		ASTNode KW_LIKE482_tree=null;
		ASTNode KW_SHOW485_tree=null;
		ASTNode KW_TBLPROPERTIES486_tree=null;
		ASTNode LPAREN488_tree=null;
		ASTNode RPAREN489_tree=null;
		ASTNode KW_SHOW490_tree=null;
		ASTNode KW_LOCKS491_tree=null;
		ASTNode KW_DATABASE492_tree=null;
		ASTNode KW_SCHEMA493_tree=null;
		ASTNode KW_SHOW494_tree=null;
		ASTNode KW_COMPACTIONS495_tree=null;
		ASTNode KW_SHOW496_tree=null;
		ASTNode KW_TRANSACTIONS497_tree=null;
		ASTNode KW_SHOW498_tree=null;
		ASTNode KW_CONF499_tree=null;
		ASTNode StringLiteral500_tree=null;
		ASTNode KW_SHOW501_tree=null;
		ASTNode KW_RESOURCE502_tree=null;
		ASTNode KW_PLAN503_tree=null;
		ASTNode KW_PLANS504_tree=null;
		RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
		RewriteRuleTokenStream stream_KW_VIEWS=new RewriteRuleTokenStream(adaptor,"token KW_VIEWS");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleTokenStream stream_KW_LIKE=new RewriteRuleTokenStream(adaptor,"token KW_LIKE");
		RewriteRuleTokenStream stream_KW_PARTITIONS=new RewriteRuleTokenStream(adaptor,"token KW_PARTITIONS");
		RewriteRuleTokenStream stream_KW_IN=new RewriteRuleTokenStream(adaptor,"token KW_IN");
		RewriteRuleTokenStream stream_KW_LOCKS=new RewriteRuleTokenStream(adaptor,"token KW_LOCKS");
		RewriteRuleTokenStream stream_KW_TABLES=new RewriteRuleTokenStream(adaptor,"token KW_TABLES");
		RewriteRuleTokenStream stream_KW_FUNCTIONS=new RewriteRuleTokenStream(adaptor,"token KW_FUNCTIONS");
		RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
		RewriteRuleTokenStream stream_KW_EXTENDED=new RewriteRuleTokenStream(adaptor,"token KW_EXTENDED");
		RewriteRuleTokenStream stream_KW_CONF=new RewriteRuleTokenStream(adaptor,"token KW_CONF");
		RewriteRuleTokenStream stream_KW_PLAN=new RewriteRuleTokenStream(adaptor,"token KW_PLAN");
		RewriteRuleTokenStream stream_KW_COLUMNS=new RewriteRuleTokenStream(adaptor,"token KW_COLUMNS");
		RewriteRuleTokenStream stream_KW_TRANSACTIONS=new RewriteRuleTokenStream(adaptor,"token KW_TRANSACTIONS");
		RewriteRuleTokenStream stream_KW_SCHEMAS=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMAS");
		RewriteRuleTokenStream stream_KW_FROM=new RewriteRuleTokenStream(adaptor,"token KW_FROM");
		RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_KW_COMPACTIONS=new RewriteRuleTokenStream(adaptor,"token KW_COMPACTIONS");
		RewriteRuleTokenStream stream_KW_PLANS=new RewriteRuleTokenStream(adaptor,"token KW_PLANS");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleTokenStream stream_KW_RESOURCE=new RewriteRuleTokenStream(adaptor,"token KW_RESOURCE");
		RewriteRuleTokenStream stream_KW_DATABASES=new RewriteRuleTokenStream(adaptor,"token KW_DATABASES");
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_SHOW=new RewriteRuleTokenStream(adaptor,"token KW_SHOW");
		RewriteRuleTokenStream stream_KW_MATERIALIZED=new RewriteRuleTokenStream(adaptor,"token KW_MATERIALIZED");
		RewriteRuleTokenStream stream_KW_TBLPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_TBLPROPERTIES");
		RewriteRuleSubtreeStream stream_showStmtIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule showStmtIdentifier");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_showFunctionIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule showFunctionIdentifier");
		RewriteRuleSubtreeStream stream_partTypeExpr=new RewriteRuleSubtreeStream(adaptor,"rule partTypeExpr");
		RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		 pushMsg("show statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1568:5: ( KW_SHOW ( KW_DATABASES | KW_SCHEMAS ) ( KW_LIKE showStmtIdentifier )? -> ^( TOK_SHOWDATABASES ( showStmtIdentifier )? ) | KW_SHOW KW_TABLES ( ( KW_FROM | KW_IN ) db_name= identifier )? ( KW_LIKE showStmtIdentifier | showStmtIdentifier )? -> ^( TOK_SHOWTABLES ( TOK_FROM $db_name)? ( showStmtIdentifier )? ) | KW_SHOW KW_VIEWS ( ( KW_FROM | KW_IN ) db_name= identifier )? ( KW_LIKE showStmtIdentifier | showStmtIdentifier )? -> ^( TOK_SHOWVIEWS ( TOK_FROM $db_name)? ( showStmtIdentifier )? ) | KW_SHOW KW_MATERIALIZED KW_VIEWS ( ( KW_FROM | KW_IN ) db_name= identifier )? ( KW_LIKE showStmtIdentifier | showStmtIdentifier )? -> ^( TOK_SHOWMATERIALIZEDVIEWS ( TOK_FROM $db_name)? ( showStmtIdentifier )? ) | KW_SHOW KW_COLUMNS ( KW_FROM | KW_IN ) tableName ( ( KW_FROM | KW_IN ) db_name= identifier )? ( KW_LIKE showStmtIdentifier | showStmtIdentifier )? -> ^( TOK_SHOWCOLUMNS tableName ( TOK_FROM $db_name)? ( showStmtIdentifier )? ) | KW_SHOW KW_FUNCTIONS ( KW_LIKE showFunctionIdentifier | showFunctionIdentifier )? -> ^( TOK_SHOWFUNCTIONS ( KW_LIKE )? ( showFunctionIdentifier )? ) | KW_SHOW KW_PARTITIONS tabName= tableName ( partitionSpec )? -> ^( TOK_SHOWPARTITIONS $tabName ( partitionSpec )? ) | KW_SHOW KW_CREATE ( ( KW_DATABASE | KW_SCHEMA )=> ( KW_DATABASE | KW_SCHEMA ) db_name= identifier -> ^( TOK_SHOW_CREATEDATABASE $db_name) | KW_TABLE tabName= tableName -> ^( TOK_SHOW_CREATETABLE $tabName) ) | KW_SHOW KW_TABLE KW_EXTENDED ( ( KW_FROM | KW_IN ) db_name= identifier )? KW_LIKE showStmtIdentifier ( partitionSpec )? -> ^( TOK_SHOW_TABLESTATUS showStmtIdentifier ( $db_name)? ( partitionSpec )? ) | KW_SHOW KW_TBLPROPERTIES tableName ( LPAREN prptyName= StringLiteral RPAREN )? -> ^( TOK_SHOW_TBLPROPERTIES tableName ( $prptyName)? ) | KW_SHOW KW_LOCKS ( ( KW_DATABASE | KW_SCHEMA )=> ( KW_DATABASE | KW_SCHEMA ) (dbName= identifier ) (isExtended= KW_EXTENDED )? -> ^( TOK_SHOWDBLOCKS $dbName ( $isExtended)? ) | (parttype= partTypeExpr )? (isExtended= KW_EXTENDED )? -> ^( TOK_SHOWLOCKS ( $parttype)? ( $isExtended)? ) ) | KW_SHOW KW_COMPACTIONS -> ^( TOK_SHOW_COMPACTIONS ) | KW_SHOW KW_TRANSACTIONS -> ^( TOK_SHOW_TRANSACTIONS ) | KW_SHOW KW_CONF StringLiteral -> ^( TOK_SHOWCONF StringLiteral ) | KW_SHOW KW_RESOURCE ( ( KW_PLAN rp_name= identifier -> ^( TOK_SHOW_RP $rp_name) ) | ( KW_PLANS -> ^( TOK_SHOW_RP ) ) ) )
			int alt154=15;
			int LA154_0 = input.LA(1);
			if ( (LA154_0==KW_SHOW) ) {
				switch ( input.LA(2) ) {
				case KW_TABLES:
					{
					alt154=2;
					}
					break;
				case KW_VIEWS:
					{
					alt154=3;
					}
					break;
				case KW_MATERIALIZED:
					{
					alt154=4;
					}
					break;
				case KW_COLUMNS:
					{
					alt154=5;
					}
					break;
				case KW_FUNCTIONS:
					{
					alt154=6;
					}
					break;
				case KW_PARTITIONS:
					{
					alt154=7;
					}
					break;
				case KW_CREATE:
					{
					alt154=8;
					}
					break;
				case KW_TABLE:
					{
					alt154=9;
					}
					break;
				case KW_TBLPROPERTIES:
					{
					alt154=10;
					}
					break;
				case KW_LOCKS:
					{
					alt154=11;
					}
					break;
				case KW_COMPACTIONS:
					{
					alt154=12;
					}
					break;
				case KW_TRANSACTIONS:
					{
					alt154=13;
					}
					break;
				case KW_CONF:
					{
					alt154=14;
					}
					break;
				case KW_RESOURCE:
					{
					alt154=15;
					}
					break;
				case KW_DATABASES:
				case KW_SCHEMAS:
					{
					alt154=1;
					}
					break;
				default:
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 154, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 154, 0, input);
				throw nvae;
			}
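			// All fifteen alternatives begin with KW_SHOW, so the real decision
			// is made on LA(2). The default branch consumes one token under
			// mark()/rewind() solely so the NoViableAltException reports the
			// offending second token, then restores the stream.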

			switch (alt154) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1568:7: KW_SHOW ( KW_DATABASES | KW_SCHEMAS ) ( KW_LIKE showStmtIdentifier )?
					{
					KW_SHOW427=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement7699); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW427);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1568:15: ( KW_DATABASES | KW_SCHEMAS )
					int alt125=2;
					int LA125_0 = input.LA(1);
					if ( (LA125_0==KW_DATABASES) ) {
						alt125=1;
					}
					else if ( (LA125_0==KW_SCHEMAS) ) {
						alt125=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						NoViableAltException nvae =
							new NoViableAltException("", 125, 0, input);
						throw nvae;
					}

					switch (alt125) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1568:16: KW_DATABASES
							{
							KW_DATABASES428=(Token)match(input,KW_DATABASES,FOLLOW_KW_DATABASES_in_showStatement7702); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_DATABASES.add(KW_DATABASES428);

							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1568:29: KW_SCHEMAS
							{
							KW_SCHEMAS429=(Token)match(input,KW_SCHEMAS,FOLLOW_KW_SCHEMAS_in_showStatement7704); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_SCHEMAS.add(KW_SCHEMAS429);

							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1568:41: ( KW_LIKE showStmtIdentifier )?
					int alt126=2;
					int LA126_0 = input.LA(1);
					if ( (LA126_0==KW_LIKE) ) {
						alt126=1;
					}
					switch (alt126) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1568:42: KW_LIKE showStmtIdentifier
							{
							KW_LIKE430=(Token)match(input,KW_LIKE,FOLLOW_KW_LIKE_in_showStatement7708); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_LIKE.add(KW_LIKE430);

							pushFollow(FOLLOW_showStmtIdentifier_in_showStatement7710);
							showStmtIdentifier431=showStmtIdentifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_showStmtIdentifier.add(showStmtIdentifier431.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: showStmtIdentifier
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1568:71: -> ^( TOK_SHOWDATABASES ( showStmtIdentifier )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1568:74: ^( TOK_SHOWDATABASES ( showStmtIdentifier )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOWDATABASES, "TOK_SHOWDATABASES"), root_1);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1568:94: ( showStmtIdentifier )?
						if ( stream_showStmtIdentifier.hasNext() ) {
							adaptor.addChild(root_1, stream_showStmtIdentifier.nextTree());
						}
						stream_showStmtIdentifier.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1569:7: KW_SHOW KW_TABLES ( ( KW_FROM | KW_IN ) db_name= identifier )? ( KW_LIKE showStmtIdentifier | showStmtIdentifier )?
					{
					KW_SHOW432=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement7729); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW432);

					KW_TABLES433=(Token)match(input,KW_TABLES,FOLLOW_KW_TABLES_in_showStatement7731); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TABLES.add(KW_TABLES433);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1569:25: ( ( KW_FROM | KW_IN ) db_name= identifier )?
					int alt128=2;
					int LA128_0 = input.LA(1);
					if ( (LA128_0==KW_FROM||LA128_0==KW_IN) ) {
						alt128=1;
					}
					switch (alt128) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1569:26: ( KW_FROM | KW_IN ) db_name= identifier
							{
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1569:26: ( KW_FROM | KW_IN )
							int alt127=2;
							int LA127_0 = input.LA(1);
							if ( (LA127_0==KW_FROM) ) {
								alt127=1;
							}
							else if ( (LA127_0==KW_IN) ) {
								alt127=2;
							}

							else {
								if (state.backtracking>0) {state.failed=true; return retval;}
								NoViableAltException nvae =
									new NoViableAltException("", 127, 0, input);
								throw nvae;
							}

							switch (alt127) {
								case 1 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1569:27: KW_FROM
									{
									KW_FROM434=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_showStatement7735); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_FROM.add(KW_FROM434);

									}
									break;
								case 2 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1569:35: KW_IN
									{
									KW_IN435=(Token)match(input,KW_IN,FOLLOW_KW_IN_in_showStatement7737); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_IN.add(KW_IN435);

									}
									break;

							}

							pushFollow(FOLLOW_identifier_in_showStatement7742);
							db_name=identifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_identifier.add(db_name.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1569:63: ( KW_LIKE showStmtIdentifier | showStmtIdentifier )?
					int alt129=3;
					int LA129_0 = input.LA(1);
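					// The decision below first looks for KW_LIKE; the generated
					// membership test in the else-if asks whether LA(1) can start
					// a bare showStmtIdentifier instead. Unlike the identifier
					// sets above, this one also admits StringLiteral, so a quoted
					// pattern such as 'page*' can follow SHOW TABLES directly.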
					if ( (LA129_0==KW_LIKE) ) {
						alt129=1;
					}
					else if ( (LA129_0==Identifier||(LA129_0 >= KW_ABORT && LA129_0 <= KW_AFTER)||LA129_0==KW_ALLOC_FRACTION||LA129_0==KW_ANALYZE||LA129_0==KW_ARCHIVE||LA129_0==KW_ASC||(LA129_0 >= KW_AUTOCOMMIT && LA129_0 <= KW_BEFORE)||(LA129_0 >= KW_BUCKET && LA129_0 <= KW_BUCKETS)||(LA129_0 >= KW_CACHE && LA129_0 <= KW_CASCADE)||LA129_0==KW_CHANGE||(LA129_0 >= KW_CHECK && LA129_0 <= KW_COLLECTION)||(LA129_0 >= KW_COLUMNS && LA129_0 <= KW_COMMENT)||(LA129_0 >= KW_COMPACT && LA129_0 <= KW_CONCATENATE)||LA129_0==KW_CONTINUE||LA129_0==KW_DATA||LA129_0==KW_DATABASES||(LA129_0 >= KW_DATETIME && LA129_0 <= KW_DBPROPERTIES)||(LA129_0 >= KW_DEFAULT && LA129_0 <= KW_DEFINED)||(LA129_0 >= KW_DELIMITED && LA129_0 <= KW_DESC)||(LA129_0 >= KW_DETAIL && LA129_0 <= KW_DISABLE)||(LA129_0 >= KW_DISTRIBUTE && LA129_0 <= KW_DO)||LA129_0==KW_DOW||(LA129_0 >= KW_DUMP && LA129_0 <= KW_ELEM_TYPE)||LA129_0==KW_ENABLE||(LA129_0 >= KW_ENFORCED && LA129_0 <= KW_ESCAPED)||LA129_0==KW_EXCLUSIVE||(LA129_0 >= KW_EXPLAIN && LA129_0 <= KW_EXPRESSION)||(LA129_0 >= KW_FIELDS && LA129_0 <= KW_FIRST)||(LA129_0 >= KW_FORMAT && LA129_0 <= KW_FORMATTED)||LA129_0==KW_FUNCTIONS||(LA129_0 >= KW_HOUR && LA129_0 <= KW_IDXPROPERTIES)||(LA129_0 >= KW_INDEX && LA129_0 <= KW_INDEXES)||(LA129_0 >= KW_INPATH && LA129_0 <= KW_INPUTFORMAT)||(LA129_0 >= KW_ISOLATION && LA129_0 <= KW_JAR)||(LA129_0 >= KW_KEY && LA129_0 <= KW_LAST)||LA129_0==KW_LEVEL||(LA129_0 >= KW_LIMIT && LA129_0 <= KW_LOAD)||(LA129_0 >= KW_LOCATION && LA129_0 <= KW_LONG)||LA129_0==KW_MANAGEMENT||(LA129_0 >= KW_MAPJOIN && LA129_0 <= KW_MATERIALIZED)||LA129_0==KW_METADATA||(LA129_0 >= KW_MINUTE && LA129_0 <= KW_MONTH)||(LA129_0 >= KW_MOVE && LA129_0 <= KW_MSCK)||(LA129_0 >= KW_NORELY && LA129_0 <= KW_NOSCAN)||LA129_0==KW_NOVALIDATE||LA129_0==KW_NULLS||LA129_0==KW_OFFSET||(LA129_0 >= KW_OPERATOR && LA129_0 <= KW_OPTION)||(LA129_0 >= KW_OUTPUTDRIVER && LA129_0 <= KW_OUTPUTFORMAT)||(LA129_0 >= KW_OVERWRITE && LA129_0 <= KW_OWNER)||(LA129_0 >= KW_PARTITIONED && LA129_0 <= KW_PATH)||(LA129_0 >= KW_PLAN && LA129_0 <= KW_POOL)||LA129_0==KW_PRINCIPALS||(LA129_0 >= KW_PURGE && LA129_0 <= KW_QUERY_PARALLELISM)||LA129_0==KW_READ||(LA129_0 >= KW_REBUILD && LA129_0 <= KW_RECORDWRITER)||(LA129_0 >= KW_RELOAD && LA129_0 <= KW_RESTRICT)||LA129_0==KW_REWRITE||(LA129_0 >= KW_ROLE && LA129_0 <= KW_ROLES)||(LA129_0 >= KW_SCHEDULING_POLICY && LA129_0 <= KW_SECOND)||(LA129_0 >= KW_SEMI && LA129_0 <= KW_SERVER)||(LA129_0 >= KW_SETS && LA129_0 <= KW_SKEWED)||(LA129_0 >= KW_SNAPSHOT && LA129_0 <= KW_SSL)||(LA129_0 >= KW_STATISTICS && LA129_0 <= KW_SUMMARY)||LA129_0==KW_TABLES||(LA129_0 >= KW_TBLPROPERTIES && LA129_0 <= KW_TERMINATED)||LA129_0==KW_TINYINT||(LA129_0 >= KW_TOUCH && LA129_0 <= KW_TRANSACTIONS)||LA129_0==KW_UNARCHIVE||LA129_0==KW_UNDO||LA129_0==KW_UNIONTYPE||(LA129_0 >= KW_UNLOCK && LA129_0 <= KW_UNSIGNED)||(LA129_0 >= KW_URI && LA129_0 <= KW_USE)||(LA129_0 >= KW_UTC && LA129_0 <= KW_VALIDATE)||LA129_0==KW_VALUE_TYPE||(LA129_0 >= KW_VECTORIZATION && LA129_0 <= KW_WEEK)||LA129_0==KW_WHILE||(LA129_0 >= KW_WORK && LA129_0 <= KW_ZONE)||LA129_0==StringLiteral||LA129_0==KW_BATCH||LA129_0==KW_DAYOFWEEK||LA129_0==KW_HOLD_DDLTIME||LA129_0==KW_IGNORE||LA129_0==KW_NO_DROP||LA129_0==KW_OFFLINE||LA129_0==KW_PROTECTION||LA129_0==KW_READONLY||LA129_0==KW_TIMESTAMPTZ) ) {
						alt129=2;
					}
					switch (alt129) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1569:64: KW_LIKE showStmtIdentifier
							{
							KW_LIKE436=(Token)match(input,KW_LIKE,FOLLOW_KW_LIKE_in_showStatement7747); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_LIKE.add(KW_LIKE436);

							pushFollow(FOLLOW_showStmtIdentifier_in_showStatement7749);
							showStmtIdentifier437=showStmtIdentifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_showStmtIdentifier.add(showStmtIdentifier437.getTree());
							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1569:91: showStmtIdentifier
							{
							pushFollow(FOLLOW_showStmtIdentifier_in_showStatement7751);
							showStmtIdentifier438=showStmtIdentifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_showStmtIdentifier.add(showStmtIdentifier438.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: showStmtIdentifier, db_name
					// token labels: 
					// rule labels: db_name, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_db_name=new RewriteRuleSubtreeStream(adaptor,"rule db_name",db_name!=null?db_name.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1569:113: -> ^( TOK_SHOWTABLES ( TOK_FROM $db_name)? ( showStmtIdentifier )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1569:116: ^( TOK_SHOWTABLES ( TOK_FROM $db_name)? ( showStmtIdentifier )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOWTABLES, "TOK_SHOWTABLES"), root_1);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1569:133: ( TOK_FROM $db_name)?
						if ( stream_db_name.hasNext() ) {
							adaptor.addChild(root_1, (ASTNode)adaptor.create(TOK_FROM, "TOK_FROM"));
							adaptor.addChild(root_1, stream_db_name.nextTree());
						}
						stream_db_name.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1569:154: ( showStmtIdentifier )?
						if ( stream_showStmtIdentifier.hasNext() ) {
							adaptor.addChild(root_1, stream_showStmtIdentifier.nextTree());
						}
						stream_showStmtIdentifier.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:7: KW_SHOW KW_VIEWS ( ( KW_FROM | KW_IN ) db_name= identifier )? ( KW_LIKE showStmtIdentifier | showStmtIdentifier )?
					{
					KW_SHOW439=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement7779); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW439);

					KW_VIEWS440=(Token)match(input,KW_VIEWS,FOLLOW_KW_VIEWS_in_showStatement7781); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_VIEWS.add(KW_VIEWS440);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:24: ( ( KW_FROM | KW_IN ) db_name= identifier )?
					int alt131=2;
					int LA131_0 = input.LA(1);
					if ( (LA131_0==KW_FROM||LA131_0==KW_IN) ) {
						alt131=1;
					}
					switch (alt131) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:25: ( KW_FROM | KW_IN ) db_name= identifier
							{
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:25: ( KW_FROM | KW_IN )
							int alt130=2;
							int LA130_0 = input.LA(1);
							if ( (LA130_0==KW_FROM) ) {
								alt130=1;
							}
							else if ( (LA130_0==KW_IN) ) {
								alt130=2;
							}

							else {
								if (state.backtracking>0) {state.failed=true; return retval;}
								NoViableAltException nvae =
									new NoViableAltException("", 130, 0, input);
								throw nvae;
							}

							switch (alt130) {
								case 1 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:26: KW_FROM
									{
									KW_FROM441=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_showStatement7785); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_FROM.add(KW_FROM441);

									}
									break;
								case 2 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:34: KW_IN
									{
									KW_IN442=(Token)match(input,KW_IN,FOLLOW_KW_IN_in_showStatement7787); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_IN.add(KW_IN442);

									}
									break;

							}

							pushFollow(FOLLOW_identifier_in_showStatement7792);
							db_name=identifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_identifier.add(db_name.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:62: ( KW_LIKE showStmtIdentifier | showStmtIdentifier )?
					int alt132=3;
					int LA132_0 = input.LA(1);
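					// Same optional (KW_LIKE showStmtIdentifier | showStmtIdentifier)
					// decision as in the SHOW TABLES alternative, here for SHOW VIEWS.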
					if ( (LA132_0==KW_LIKE) ) {
						alt132=1;
					}
					else if ( (LA132_0==Identifier||(LA132_0 >= KW_ABORT && LA132_0 <= KW_AFTER)||LA132_0==KW_ALLOC_FRACTION||LA132_0==KW_ANALYZE||LA132_0==KW_ARCHIVE||LA132_0==KW_ASC||(LA132_0 >= KW_AUTOCOMMIT && LA132_0 <= KW_BEFORE)||(LA132_0 >= KW_BUCKET && LA132_0 <= KW_BUCKETS)||(LA132_0 >= KW_CACHE && LA132_0 <= KW_CASCADE)||LA132_0==KW_CHANGE||(LA132_0 >= KW_CHECK && LA132_0 <= KW_COLLECTION)||(LA132_0 >= KW_COLUMNS && LA132_0 <= KW_COMMENT)||(LA132_0 >= KW_COMPACT && LA132_0 <= KW_CONCATENATE)||LA132_0==KW_CONTINUE||LA132_0==KW_DATA||LA132_0==KW_DATABASES||(LA132_0 >= KW_DATETIME && LA132_0 <= KW_DBPROPERTIES)||(LA132_0 >= KW_DEFAULT && LA132_0 <= KW_DEFINED)||(LA132_0 >= KW_DELIMITED && LA132_0 <= KW_DESC)||(LA132_0 >= KW_DETAIL && LA132_0 <= KW_DISABLE)||(LA132_0 >= KW_DISTRIBUTE && LA132_0 <= KW_DO)||LA132_0==KW_DOW||(LA132_0 >= KW_DUMP && LA132_0 <= KW_ELEM_TYPE)||LA132_0==KW_ENABLE||(LA132_0 >= KW_ENFORCED && LA132_0 <= KW_ESCAPED)||LA132_0==KW_EXCLUSIVE||(LA132_0 >= KW_EXPLAIN && LA132_0 <= KW_EXPRESSION)||(LA132_0 >= KW_FIELDS && LA132_0 <= KW_FIRST)||(LA132_0 >= KW_FORMAT && LA132_0 <= KW_FORMATTED)||LA132_0==KW_FUNCTIONS||(LA132_0 >= KW_HOUR && LA132_0 <= KW_IDXPROPERTIES)||(LA132_0 >= KW_INDEX && LA132_0 <= KW_INDEXES)||(LA132_0 >= KW_INPATH && LA132_0 <= KW_INPUTFORMAT)||(LA132_0 >= KW_ISOLATION && LA132_0 <= KW_JAR)||(LA132_0 >= KW_KEY && LA132_0 <= KW_LAST)||LA132_0==KW_LEVEL||(LA132_0 >= KW_LIMIT && LA132_0 <= KW_LOAD)||(LA132_0 >= KW_LOCATION && LA132_0 <= KW_LONG)||LA132_0==KW_MANAGEMENT||(LA132_0 >= KW_MAPJOIN && LA132_0 <= KW_MATERIALIZED)||LA132_0==KW_METADATA||(LA132_0 >= KW_MINUTE && LA132_0 <= KW_MONTH)||(LA132_0 >= KW_MOVE && LA132_0 <= KW_MSCK)||(LA132_0 >= KW_NORELY && LA132_0 <= KW_NOSCAN)||LA132_0==KW_NOVALIDATE||LA132_0==KW_NULLS||LA132_0==KW_OFFSET||(LA132_0 >= KW_OPERATOR && LA132_0 <= KW_OPTION)||(LA132_0 >= KW_OUTPUTDRIVER && LA132_0 <= KW_OUTPUTFORMAT)||(LA132_0 >= KW_OVERWRITE && LA132_0 <= KW_OWNER)||(LA132_0 >= KW_PARTITIONED && LA132_0 <= KW_PATH)||(LA132_0 >= KW_PLAN && LA132_0 <= KW_POOL)||LA132_0==KW_PRINCIPALS||(LA132_0 >= KW_PURGE && LA132_0 <= KW_QUERY_PARALLELISM)||LA132_0==KW_READ||(LA132_0 >= KW_REBUILD && LA132_0 <= KW_RECORDWRITER)||(LA132_0 >= KW_RELOAD && LA132_0 <= KW_RESTRICT)||LA132_0==KW_REWRITE||(LA132_0 >= KW_ROLE && LA132_0 <= KW_ROLES)||(LA132_0 >= KW_SCHEDULING_POLICY && LA132_0 <= KW_SECOND)||(LA132_0 >= KW_SEMI && LA132_0 <= KW_SERVER)||(LA132_0 >= KW_SETS && LA132_0 <= KW_SKEWED)||(LA132_0 >= KW_SNAPSHOT && LA132_0 <= KW_SSL)||(LA132_0 >= KW_STATISTICS && LA132_0 <= KW_SUMMARY)||LA132_0==KW_TABLES||(LA132_0 >= KW_TBLPROPERTIES && LA132_0 <= KW_TERMINATED)||LA132_0==KW_TINYINT||(LA132_0 >= KW_TOUCH && LA132_0 <= KW_TRANSACTIONS)||LA132_0==KW_UNARCHIVE||LA132_0==KW_UNDO||LA132_0==KW_UNIONTYPE||(LA132_0 >= KW_UNLOCK && LA132_0 <= KW_UNSIGNED)||(LA132_0 >= KW_URI && LA132_0 <= KW_USE)||(LA132_0 >= KW_UTC && LA132_0 <= KW_VALIDATE)||LA132_0==KW_VALUE_TYPE||(LA132_0 >= KW_VECTORIZATION && LA132_0 <= KW_WEEK)||LA132_0==KW_WHILE||(LA132_0 >= KW_WORK && LA132_0 <= KW_ZONE)||LA132_0==StringLiteral||LA132_0==KW_BATCH||LA132_0==KW_DAYOFWEEK||LA132_0==KW_HOLD_DDLTIME||LA132_0==KW_IGNORE||LA132_0==KW_NO_DROP||LA132_0==KW_OFFLINE||LA132_0==KW_PROTECTION||LA132_0==KW_READONLY||LA132_0==KW_TIMESTAMPTZ) ) {
						alt132=2;
					}
					switch (alt132) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:63: KW_LIKE showStmtIdentifier
							{
							KW_LIKE443=(Token)match(input,KW_LIKE,FOLLOW_KW_LIKE_in_showStatement7797); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_LIKE.add(KW_LIKE443);

							pushFollow(FOLLOW_showStmtIdentifier_in_showStatement7799);
							showStmtIdentifier444=showStmtIdentifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_showStmtIdentifier.add(showStmtIdentifier444.getTree());
							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:90: showStmtIdentifier
							{
							pushFollow(FOLLOW_showStmtIdentifier_in_showStatement7801);
							showStmtIdentifier445=showStmtIdentifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_showStmtIdentifier.add(showStmtIdentifier445.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: showStmtIdentifier, db_name
					// token labels: 
					// rule labels: db_name, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_db_name=new RewriteRuleSubtreeStream(adaptor,"rule db_name",db_name!=null?db_name.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1570:112: -> ^( TOK_SHOWVIEWS ( TOK_FROM $db_name)? ( showStmtIdentifier )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:115: ^( TOK_SHOWVIEWS ( TOK_FROM $db_name)? ( showStmtIdentifier )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOWVIEWS, "TOK_SHOWVIEWS"), root_1);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:131: ( TOK_FROM $db_name)?
						if ( stream_db_name.hasNext() ) {
							adaptor.addChild(root_1, (ASTNode)adaptor.create(TOK_FROM, "TOK_FROM"));
							adaptor.addChild(root_1, stream_db_name.nextTree());
						}
						stream_db_name.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:152: ( showStmtIdentifier )?
						if ( stream_showStmtIdentifier.hasNext() ) {
							adaptor.addChild(root_1, stream_showStmtIdentifier.nextTree());
						}
						stream_showStmtIdentifier.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1571:7: KW_SHOW KW_MATERIALIZED KW_VIEWS ( ( KW_FROM | KW_IN ) db_name= identifier )? ( KW_LIKE showStmtIdentifier | showStmtIdentifier )?
					{
					KW_SHOW446=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement7829); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW446);

					KW_MATERIALIZED447=(Token)match(input,KW_MATERIALIZED,FOLLOW_KW_MATERIALIZED_in_showStatement7831); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_MATERIALIZED.add(KW_MATERIALIZED447);

					KW_VIEWS448=(Token)match(input,KW_VIEWS,FOLLOW_KW_VIEWS_in_showStatement7833); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_VIEWS.add(KW_VIEWS448);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1571:40: ( ( KW_FROM | KW_IN ) db_name= identifier )?
					int alt134=2;
					int LA134_0 = input.LA(1);
					if ( (LA134_0==KW_FROM||LA134_0==KW_IN) ) {
						alt134=1;
					}
					switch (alt134) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1571:41: ( KW_FROM | KW_IN ) db_name= identifier
							{
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1571:41: ( KW_FROM | KW_IN )
							int alt133=2;
							int LA133_0 = input.LA(1);
							if ( (LA133_0==KW_FROM) ) {
								alt133=1;
							}
							else if ( (LA133_0==KW_IN) ) {
								alt133=2;
							}

							else {
								if (state.backtracking>0) {state.failed=true; return retval;}
								NoViableAltException nvae =
									new NoViableAltException("", 133, 0, input);
								throw nvae;
							}

							switch (alt133) {
								case 1 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1571:42: KW_FROM
									{
									KW_FROM449=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_showStatement7837); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_FROM.add(KW_FROM449);

									}
									break;
								case 2 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1571:50: KW_IN
									{
									KW_IN450=(Token)match(input,KW_IN,FOLLOW_KW_IN_in_showStatement7839); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_IN.add(KW_IN450);

									}
									break;

							}

							pushFollow(FOLLOW_identifier_in_showStatement7844);
							db_name=identifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_identifier.add(db_name.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1571:78: ( KW_LIKE showStmtIdentifier | showStmtIdentifier )?
					int alt135=3;
					int LA135_0 = input.LA(1);
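					// Same optional pattern-or-identifier decision again, here for
					// SHOW MATERIALIZED VIEWS.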
					if ( (LA135_0==KW_LIKE) ) {
						alt135=1;
					}
					else if ( (LA135_0==Identifier||(LA135_0 >= KW_ABORT && LA135_0 <= KW_AFTER)||LA135_0==KW_ALLOC_FRACTION||LA135_0==KW_ANALYZE||LA135_0==KW_ARCHIVE||LA135_0==KW_ASC||(LA135_0 >= KW_AUTOCOMMIT && LA135_0 <= KW_BEFORE)||(LA135_0 >= KW_BUCKET && LA135_0 <= KW_BUCKETS)||(LA135_0 >= KW_CACHE && LA135_0 <= KW_CASCADE)||LA135_0==KW_CHANGE||(LA135_0 >= KW_CHECK && LA135_0 <= KW_COLLECTION)||(LA135_0 >= KW_COLUMNS && LA135_0 <= KW_COMMENT)||(LA135_0 >= KW_COMPACT && LA135_0 <= KW_CONCATENATE)||LA135_0==KW_CONTINUE||LA135_0==KW_DATA||LA135_0==KW_DATABASES||(LA135_0 >= KW_DATETIME && LA135_0 <= KW_DBPROPERTIES)||(LA135_0 >= KW_DEFAULT && LA135_0 <= KW_DEFINED)||(LA135_0 >= KW_DELIMITED && LA135_0 <= KW_DESC)||(LA135_0 >= KW_DETAIL && LA135_0 <= KW_DISABLE)||(LA135_0 >= KW_DISTRIBUTE && LA135_0 <= KW_DO)||LA135_0==KW_DOW||(LA135_0 >= KW_DUMP && LA135_0 <= KW_ELEM_TYPE)||LA135_0==KW_ENABLE||(LA135_0 >= KW_ENFORCED && LA135_0 <= KW_ESCAPED)||LA135_0==KW_EXCLUSIVE||(LA135_0 >= KW_EXPLAIN && LA135_0 <= KW_EXPRESSION)||(LA135_0 >= KW_FIELDS && LA135_0 <= KW_FIRST)||(LA135_0 >= KW_FORMAT && LA135_0 <= KW_FORMATTED)||LA135_0==KW_FUNCTIONS||(LA135_0 >= KW_HOUR && LA135_0 <= KW_IDXPROPERTIES)||(LA135_0 >= KW_INDEX && LA135_0 <= KW_INDEXES)||(LA135_0 >= KW_INPATH && LA135_0 <= KW_INPUTFORMAT)||(LA135_0 >= KW_ISOLATION && LA135_0 <= KW_JAR)||(LA135_0 >= KW_KEY && LA135_0 <= KW_LAST)||LA135_0==KW_LEVEL||(LA135_0 >= KW_LIMIT && LA135_0 <= KW_LOAD)||(LA135_0 >= KW_LOCATION && LA135_0 <= KW_LONG)||LA135_0==KW_MANAGEMENT||(LA135_0 >= KW_MAPJOIN && LA135_0 <= KW_MATERIALIZED)||LA135_0==KW_METADATA||(LA135_0 >= KW_MINUTE && LA135_0 <= KW_MONTH)||(LA135_0 >= KW_MOVE && LA135_0 <= KW_MSCK)||(LA135_0 >= KW_NORELY && LA135_0 <= KW_NOSCAN)||LA135_0==KW_NOVALIDATE||LA135_0==KW_NULLS||LA135_0==KW_OFFSET||(LA135_0 >= KW_OPERATOR && LA135_0 <= KW_OPTION)||(LA135_0 >= KW_OUTPUTDRIVER && LA135_0 <= KW_OUTPUTFORMAT)||(LA135_0 >= KW_OVERWRITE && LA135_0 <= KW_OWNER)||(LA135_0 >= KW_PARTITIONED && LA135_0 <= KW_PATH)||(LA135_0 >= KW_PLAN && LA135_0 <= KW_POOL)||LA135_0==KW_PRINCIPALS||(LA135_0 >= KW_PURGE && LA135_0 <= KW_QUERY_PARALLELISM)||LA135_0==KW_READ||(LA135_0 >= KW_REBUILD && LA135_0 <= KW_RECORDWRITER)||(LA135_0 >= KW_RELOAD && LA135_0 <= KW_RESTRICT)||LA135_0==KW_REWRITE||(LA135_0 >= KW_ROLE && LA135_0 <= KW_ROLES)||(LA135_0 >= KW_SCHEDULING_POLICY && LA135_0 <= KW_SECOND)||(LA135_0 >= KW_SEMI && LA135_0 <= KW_SERVER)||(LA135_0 >= KW_SETS && LA135_0 <= KW_SKEWED)||(LA135_0 >= KW_SNAPSHOT && LA135_0 <= KW_SSL)||(LA135_0 >= KW_STATISTICS && LA135_0 <= KW_SUMMARY)||LA135_0==KW_TABLES||(LA135_0 >= KW_TBLPROPERTIES && LA135_0 <= KW_TERMINATED)||LA135_0==KW_TINYINT||(LA135_0 >= KW_TOUCH && LA135_0 <= KW_TRANSACTIONS)||LA135_0==KW_UNARCHIVE||LA135_0==KW_UNDO||LA135_0==KW_UNIONTYPE||(LA135_0 >= KW_UNLOCK && LA135_0 <= KW_UNSIGNED)||(LA135_0 >= KW_URI && LA135_0 <= KW_USE)||(LA135_0 >= KW_UTC && LA135_0 <= KW_VALIDATE)||LA135_0==KW_VALUE_TYPE||(LA135_0 >= KW_VECTORIZATION && LA135_0 <= KW_WEEK)||LA135_0==KW_WHILE||(LA135_0 >= KW_WORK && LA135_0 <= KW_ZONE)||LA135_0==StringLiteral||LA135_0==KW_BATCH||LA135_0==KW_DAYOFWEEK||LA135_0==KW_HOLD_DDLTIME||LA135_0==KW_IGNORE||LA135_0==KW_NO_DROP||LA135_0==KW_OFFLINE||LA135_0==KW_PROTECTION||LA135_0==KW_READONLY||LA135_0==KW_TIMESTAMPTZ) ) {
						alt135=2;
					}
					switch (alt135) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1571:79: KW_LIKE showStmtIdentifier
							{
							KW_LIKE451=(Token)match(input,KW_LIKE,FOLLOW_KW_LIKE_in_showStatement7849); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_LIKE.add(KW_LIKE451);

							pushFollow(FOLLOW_showStmtIdentifier_in_showStatement7851);
							showStmtIdentifier452=showStmtIdentifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_showStmtIdentifier.add(showStmtIdentifier452.getTree());
							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1571:106: showStmtIdentifier
							{
							pushFollow(FOLLOW_showStmtIdentifier_in_showStatement7853);
							showStmtIdentifier453=showStmtIdentifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_showStmtIdentifier.add(showStmtIdentifier453.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: db_name, showStmtIdentifier
					// token labels: 
					// rule labels: db_name, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_db_name=new RewriteRuleSubtreeStream(adaptor,"rule db_name",db_name!=null?db_name.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1571:128: -> ^( TOK_SHOWMATERIALIZEDVIEWS ( TOK_FROM $db_name)? ( showStmtIdentifier )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1571:131: ^( TOK_SHOWMATERIALIZEDVIEWS ( TOK_FROM $db_name)? ( showStmtIdentifier )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOWMATERIALIZEDVIEWS, "TOK_SHOWMATERIALIZEDVIEWS"), root_1);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1571:159: ( TOK_FROM $db_name)?
						if ( stream_db_name.hasNext() ) {
							adaptor.addChild(root_1, (ASTNode)adaptor.create(TOK_FROM, "TOK_FROM"));
							adaptor.addChild(root_1, stream_db_name.nextTree());
						}
						stream_db_name.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1571:180: ( showStmtIdentifier )?
						if ( stream_showStmtIdentifier.hasNext() ) {
							adaptor.addChild(root_1, stream_showStmtIdentifier.nextTree());
						}
						stream_showStmtIdentifier.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 5 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1572:7: KW_SHOW KW_COLUMNS ( KW_FROM | KW_IN ) tableName ( ( KW_FROM | KW_IN ) db_name= identifier )? ( KW_LIKE showStmtIdentifier | showStmtIdentifier )?
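					// Explanatory note (not generated by ANTLR): this alternative parses
					// "SHOW COLUMNS (FROM|IN) <table> [(FROM|IN) <db>] [[LIKE] <pattern>]" and rewrites it
					// to ^(TOK_SHOWCOLUMNS tableName (TOK_FROM $db_name)? showStmtIdentifier?).
					// Illustrative HiveQL (names are made up):
					//   SHOW COLUMNS IN orders FROM sales LIKE 'ship*';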
					{
					KW_SHOW454=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement7881); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW454);

					KW_COLUMNS455=(Token)match(input,KW_COLUMNS,FOLLOW_KW_COLUMNS_in_showStatement7883); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_COLUMNS.add(KW_COLUMNS455);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1572:26: ( KW_FROM | KW_IN )
					int alt136=2;
					int LA136_0 = input.LA(1);
					if ( (LA136_0==KW_FROM) ) {
						alt136=1;
					}
					else if ( (LA136_0==KW_IN) ) {
						alt136=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						NoViableAltException nvae =
							new NoViableAltException("", 136, 0, input);
						throw nvae;
					}

					switch (alt136) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1572:27: KW_FROM
							{
							KW_FROM456=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_showStatement7886); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_FROM.add(KW_FROM456);

							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1572:35: KW_IN
							{
							KW_IN457=(Token)match(input,KW_IN,FOLLOW_KW_IN_in_showStatement7888); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_IN.add(KW_IN457);

							}
							break;

					}

					pushFollow(FOLLOW_tableName_in_showStatement7891);
					tableName458=tableName();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableName.add(tableName458.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1572:52: ( ( KW_FROM | KW_IN ) db_name= identifier )?
					int alt138=2;
					int LA138_0 = input.LA(1);
					if ( (LA138_0==KW_FROM||LA138_0==KW_IN) ) {
						alt138=1;
					}
					switch (alt138) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1572:53: ( KW_FROM | KW_IN ) db_name= identifier
							{
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1572:53: ( KW_FROM | KW_IN )
							int alt137=2;
							int LA137_0 = input.LA(1);
							if ( (LA137_0==KW_FROM) ) {
								alt137=1;
							}
							else if ( (LA137_0==KW_IN) ) {
								alt137=2;
							}

							else {
								if (state.backtracking>0) {state.failed=true; return retval;}
								NoViableAltException nvae =
									new NoViableAltException("", 137, 0, input);
								throw nvae;
							}

							switch (alt137) {
								case 1 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1572:54: KW_FROM
									{
									KW_FROM459=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_showStatement7895); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_FROM.add(KW_FROM459);

									}
									break;
								case 2 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1572:62: KW_IN
									{
									KW_IN460=(Token)match(input,KW_IN,FOLLOW_KW_IN_in_showStatement7897); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_IN.add(KW_IN460);

									}
									break;

							}

							pushFollow(FOLLOW_identifier_in_showStatement7902);
							db_name=identifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_identifier.add(db_name.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1572:90: ( KW_LIKE showStmtIdentifier | showStmtIdentifier )?
					int alt139=3;
					int LA139_0 = input.LA(1);
					if ( (LA139_0==KW_LIKE) ) {
						alt139=1;
					}
					else if ( (LA139_0==Identifier||(LA139_0 >= KW_ABORT && LA139_0 <= KW_AFTER)||LA139_0==KW_ALLOC_FRACTION||LA139_0==KW_ANALYZE||LA139_0==KW_ARCHIVE||LA139_0==KW_ASC||(LA139_0 >= KW_AUTOCOMMIT && LA139_0 <= KW_BEFORE)||(LA139_0 >= KW_BUCKET && LA139_0 <= KW_BUCKETS)||(LA139_0 >= KW_CACHE && LA139_0 <= KW_CASCADE)||LA139_0==KW_CHANGE||(LA139_0 >= KW_CHECK && LA139_0 <= KW_COLLECTION)||(LA139_0 >= KW_COLUMNS && LA139_0 <= KW_COMMENT)||(LA139_0 >= KW_COMPACT && LA139_0 <= KW_CONCATENATE)||LA139_0==KW_CONTINUE||LA139_0==KW_DATA||LA139_0==KW_DATABASES||(LA139_0 >= KW_DATETIME && LA139_0 <= KW_DBPROPERTIES)||(LA139_0 >= KW_DEFAULT && LA139_0 <= KW_DEFINED)||(LA139_0 >= KW_DELIMITED && LA139_0 <= KW_DESC)||(LA139_0 >= KW_DETAIL && LA139_0 <= KW_DISABLE)||(LA139_0 >= KW_DISTRIBUTE && LA139_0 <= KW_DO)||LA139_0==KW_DOW||(LA139_0 >= KW_DUMP && LA139_0 <= KW_ELEM_TYPE)||LA139_0==KW_ENABLE||(LA139_0 >= KW_ENFORCED && LA139_0 <= KW_ESCAPED)||LA139_0==KW_EXCLUSIVE||(LA139_0 >= KW_EXPLAIN && LA139_0 <= KW_EXPRESSION)||(LA139_0 >= KW_FIELDS && LA139_0 <= KW_FIRST)||(LA139_0 >= KW_FORMAT && LA139_0 <= KW_FORMATTED)||LA139_0==KW_FUNCTIONS||(LA139_0 >= KW_HOUR && LA139_0 <= KW_IDXPROPERTIES)||(LA139_0 >= KW_INDEX && LA139_0 <= KW_INDEXES)||(LA139_0 >= KW_INPATH && LA139_0 <= KW_INPUTFORMAT)||(LA139_0 >= KW_ISOLATION && LA139_0 <= KW_JAR)||(LA139_0 >= KW_KEY && LA139_0 <= KW_LAST)||LA139_0==KW_LEVEL||(LA139_0 >= KW_LIMIT && LA139_0 <= KW_LOAD)||(LA139_0 >= KW_LOCATION && LA139_0 <= KW_LONG)||LA139_0==KW_MANAGEMENT||(LA139_0 >= KW_MAPJOIN && LA139_0 <= KW_MATERIALIZED)||LA139_0==KW_METADATA||(LA139_0 >= KW_MINUTE && LA139_0 <= KW_MONTH)||(LA139_0 >= KW_MOVE && LA139_0 <= KW_MSCK)||(LA139_0 >= KW_NORELY && LA139_0 <= KW_NOSCAN)||LA139_0==KW_NOVALIDATE||LA139_0==KW_NULLS||LA139_0==KW_OFFSET||(LA139_0 >= KW_OPERATOR && LA139_0 <= KW_OPTION)||(LA139_0 >= KW_OUTPUTDRIVER && LA139_0 <= KW_OUTPUTFORMAT)||(LA139_0 >= KW_OVERWRITE && LA139_0 <= KW_OWNER)||(LA139_0 >= KW_PARTITIONED && LA139_0 <= KW_PATH)||(LA139_0 >= KW_PLAN && LA139_0 <= KW_POOL)||LA139_0==KW_PRINCIPALS||(LA139_0 >= KW_PURGE && LA139_0 <= KW_QUERY_PARALLELISM)||LA139_0==KW_READ||(LA139_0 >= KW_REBUILD && LA139_0 <= KW_RECORDWRITER)||(LA139_0 >= KW_RELOAD && LA139_0 <= KW_RESTRICT)||LA139_0==KW_REWRITE||(LA139_0 >= KW_ROLE && LA139_0 <= KW_ROLES)||(LA139_0 >= KW_SCHEDULING_POLICY && LA139_0 <= KW_SECOND)||(LA139_0 >= KW_SEMI && LA139_0 <= KW_SERVER)||(LA139_0 >= KW_SETS && LA139_0 <= KW_SKEWED)||(LA139_0 >= KW_SNAPSHOT && LA139_0 <= KW_SSL)||(LA139_0 >= KW_STATISTICS && LA139_0 <= KW_SUMMARY)||LA139_0==KW_TABLES||(LA139_0 >= KW_TBLPROPERTIES && LA139_0 <= KW_TERMINATED)||LA139_0==KW_TINYINT||(LA139_0 >= KW_TOUCH && LA139_0 <= KW_TRANSACTIONS)||LA139_0==KW_UNARCHIVE||LA139_0==KW_UNDO||LA139_0==KW_UNIONTYPE||(LA139_0 >= KW_UNLOCK && LA139_0 <= KW_UNSIGNED)||(LA139_0 >= KW_URI && LA139_0 <= KW_USE)||(LA139_0 >= KW_UTC && LA139_0 <= KW_VALIDATE)||LA139_0==KW_VALUE_TYPE||(LA139_0 >= KW_VECTORIZATION && LA139_0 <= KW_WEEK)||LA139_0==KW_WHILE||(LA139_0 >= KW_WORK && LA139_0 <= KW_ZONE)||LA139_0==StringLiteral||LA139_0==KW_BATCH||LA139_0==KW_DAYOFWEEK||LA139_0==KW_HOLD_DDLTIME||LA139_0==KW_IGNORE||LA139_0==KW_NO_DROP||LA139_0==KW_OFFLINE||LA139_0==KW_PROTECTION||LA139_0==KW_READONLY||LA139_0==KW_TIMESTAMPTZ) ) {
						alt139=2;
					}
					switch (alt139) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1572:91: KW_LIKE showStmtIdentifier
							{
							KW_LIKE461=(Token)match(input,KW_LIKE,FOLLOW_KW_LIKE_in_showStatement7907); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_LIKE.add(KW_LIKE461);

							pushFollow(FOLLOW_showStmtIdentifier_in_showStatement7909);
							showStmtIdentifier462=showStmtIdentifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_showStmtIdentifier.add(showStmtIdentifier462.getTree());
							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1572:118: showStmtIdentifier
							{
							pushFollow(FOLLOW_showStmtIdentifier_in_showStatement7911);
							showStmtIdentifier463=showStmtIdentifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_showStmtIdentifier.add(showStmtIdentifier463.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: tableName, db_name, showStmtIdentifier
					// token labels: 
					// rule labels: db_name, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_db_name=new RewriteRuleSubtreeStream(adaptor,"rule db_name",db_name!=null?db_name.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1573:5: -> ^( TOK_SHOWCOLUMNS tableName ( TOK_FROM $db_name)? ( showStmtIdentifier )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1573:8: ^( TOK_SHOWCOLUMNS tableName ( TOK_FROM $db_name)? ( showStmtIdentifier )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOWCOLUMNS, "TOK_SHOWCOLUMNS"), root_1);
						adaptor.addChild(root_1, stream_tableName.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1573:36: ( TOK_FROM $db_name)?
						if ( stream_db_name.hasNext() ) {
							adaptor.addChild(root_1, (ASTNode)adaptor.create(TOK_FROM, "TOK_FROM"));
							adaptor.addChild(root_1, stream_db_name.nextTree());
						}
						stream_db_name.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1573:57: ( showStmtIdentifier )?
						if ( stream_showStmtIdentifier.hasNext() ) {
							adaptor.addChild(root_1, stream_showStmtIdentifier.nextTree());
						}
						stream_showStmtIdentifier.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 6 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1574:7: KW_SHOW KW_FUNCTIONS ( KW_LIKE showFunctionIdentifier | showFunctionIdentifier )?
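					// Explanatory note (not generated by ANTLR): this alternative parses
					// "SHOW FUNCTIONS [[LIKE] <pattern>]" and rewrites it to
					// ^(TOK_SHOWFUNCTIONS KW_LIKE? showFunctionIdentifier?). Keeping the KW_LIKE token in
					// the AST presumably lets downstream analysis distinguish a LIKE pattern from a bare
					// function identifier.
					// Illustrative HiveQL: SHOW FUNCTIONS LIKE 'xpath*';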
					{
					KW_SHOW464=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement7944); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW464);

					KW_FUNCTIONS465=(Token)match(input,KW_FUNCTIONS,FOLLOW_KW_FUNCTIONS_in_showStatement7946); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_FUNCTIONS.add(KW_FUNCTIONS465);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1574:28: ( KW_LIKE showFunctionIdentifier | showFunctionIdentifier )?
					int alt140=3;
					int LA140_0 = input.LA(1);
					if ( (LA140_0==KW_LIKE) ) {
						alt140=1;
					}
					else if ( (LA140_0==Identifier||(LA140_0 >= KW_ABORT && LA140_0 <= KW_AFTER)||LA140_0==KW_ALLOC_FRACTION||LA140_0==KW_ANALYZE||LA140_0==KW_ARCHIVE||LA140_0==KW_ASC||(LA140_0 >= KW_AUTOCOMMIT && LA140_0 <= KW_BEFORE)||(LA140_0 >= KW_BUCKET && LA140_0 <= KW_BUCKETS)||(LA140_0 >= KW_CACHE && LA140_0 <= KW_CASCADE)||LA140_0==KW_CHANGE||(LA140_0 >= KW_CHECK && LA140_0 <= KW_COLLECTION)||(LA140_0 >= KW_COLUMNS && LA140_0 <= KW_COMMENT)||(LA140_0 >= KW_COMPACT && LA140_0 <= KW_CONCATENATE)||LA140_0==KW_CONTINUE||LA140_0==KW_DATA||LA140_0==KW_DATABASES||(LA140_0 >= KW_DATETIME && LA140_0 <= KW_DBPROPERTIES)||(LA140_0 >= KW_DEFAULT && LA140_0 <= KW_DEFINED)||(LA140_0 >= KW_DELIMITED && LA140_0 <= KW_DESC)||(LA140_0 >= KW_DETAIL && LA140_0 <= KW_DISABLE)||(LA140_0 >= KW_DISTRIBUTE && LA140_0 <= KW_DO)||LA140_0==KW_DOW||(LA140_0 >= KW_DUMP && LA140_0 <= KW_ELEM_TYPE)||LA140_0==KW_ENABLE||(LA140_0 >= KW_ENFORCED && LA140_0 <= KW_ESCAPED)||LA140_0==KW_EXCLUSIVE||(LA140_0 >= KW_EXPLAIN && LA140_0 <= KW_EXPRESSION)||(LA140_0 >= KW_FIELDS && LA140_0 <= KW_FIRST)||(LA140_0 >= KW_FORMAT && LA140_0 <= KW_FORMATTED)||LA140_0==KW_FUNCTIONS||(LA140_0 >= KW_HOUR && LA140_0 <= KW_IDXPROPERTIES)||(LA140_0 >= KW_INDEX && LA140_0 <= KW_INDEXES)||(LA140_0 >= KW_INPATH && LA140_0 <= KW_INPUTFORMAT)||(LA140_0 >= KW_ISOLATION && LA140_0 <= KW_JAR)||(LA140_0 >= KW_KEY && LA140_0 <= KW_LAST)||LA140_0==KW_LEVEL||(LA140_0 >= KW_LIMIT && LA140_0 <= KW_LOAD)||(LA140_0 >= KW_LOCATION && LA140_0 <= KW_LONG)||LA140_0==KW_MANAGEMENT||(LA140_0 >= KW_MAPJOIN && LA140_0 <= KW_MATERIALIZED)||LA140_0==KW_METADATA||(LA140_0 >= KW_MINUTE && LA140_0 <= KW_MONTH)||(LA140_0 >= KW_MOVE && LA140_0 <= KW_MSCK)||(LA140_0 >= KW_NORELY && LA140_0 <= KW_NOSCAN)||LA140_0==KW_NOVALIDATE||LA140_0==KW_NULLS||LA140_0==KW_OFFSET||(LA140_0 >= KW_OPERATOR && LA140_0 <= KW_OPTION)||(LA140_0 >= KW_OUTPUTDRIVER && LA140_0 <= KW_OUTPUTFORMAT)||(LA140_0 >= KW_OVERWRITE && LA140_0 <= KW_OWNER)||(LA140_0 >= KW_PARTITIONED && LA140_0 <= KW_PATH)||(LA140_0 >= KW_PLAN && LA140_0 <= KW_POOL)||LA140_0==KW_PRINCIPALS||(LA140_0 >= KW_PURGE && LA140_0 <= KW_QUERY_PARALLELISM)||LA140_0==KW_READ||(LA140_0 >= KW_REBUILD && LA140_0 <= KW_RECORDWRITER)||(LA140_0 >= KW_RELOAD && LA140_0 <= KW_RESTRICT)||LA140_0==KW_REWRITE||(LA140_0 >= KW_ROLE && LA140_0 <= KW_ROLES)||(LA140_0 >= KW_SCHEDULING_POLICY && LA140_0 <= KW_SECOND)||(LA140_0 >= KW_SEMI && LA140_0 <= KW_SERVER)||(LA140_0 >= KW_SETS && LA140_0 <= KW_SKEWED)||(LA140_0 >= KW_SNAPSHOT && LA140_0 <= KW_SSL)||(LA140_0 >= KW_STATISTICS && LA140_0 <= KW_SUMMARY)||LA140_0==KW_TABLES||(LA140_0 >= KW_TBLPROPERTIES && LA140_0 <= KW_TERMINATED)||LA140_0==KW_TINYINT||(LA140_0 >= KW_TOUCH && LA140_0 <= KW_TRANSACTIONS)||LA140_0==KW_UNARCHIVE||LA140_0==KW_UNDO||LA140_0==KW_UNIONTYPE||(LA140_0 >= KW_UNLOCK && LA140_0 <= KW_UNSIGNED)||(LA140_0 >= KW_URI && LA140_0 <= KW_USE)||(LA140_0 >= KW_UTC && LA140_0 <= KW_VALIDATE)||LA140_0==KW_VALUE_TYPE||(LA140_0 >= KW_VECTORIZATION && LA140_0 <= KW_WEEK)||LA140_0==KW_WHILE||(LA140_0 >= KW_WORK && LA140_0 <= KW_ZONE)||LA140_0==StringLiteral||LA140_0==KW_BATCH||LA140_0==KW_DAYOFWEEK||LA140_0==KW_HOLD_DDLTIME||LA140_0==KW_IGNORE||LA140_0==KW_NO_DROP||LA140_0==KW_OFFLINE||LA140_0==KW_PROTECTION||LA140_0==KW_READONLY||LA140_0==KW_TIMESTAMPTZ) ) {
						alt140=2;
					}
					switch (alt140) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1574:29: KW_LIKE showFunctionIdentifier
							{
							KW_LIKE466=(Token)match(input,KW_LIKE,FOLLOW_KW_LIKE_in_showStatement7949); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_LIKE.add(KW_LIKE466);

							pushFollow(FOLLOW_showFunctionIdentifier_in_showStatement7951);
							showFunctionIdentifier467=showFunctionIdentifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_showFunctionIdentifier.add(showFunctionIdentifier467.getTree());
							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1574:60: showFunctionIdentifier
							{
							pushFollow(FOLLOW_showFunctionIdentifier_in_showStatement7953);
							showFunctionIdentifier468=showFunctionIdentifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_showFunctionIdentifier.add(showFunctionIdentifier468.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: showFunctionIdentifier, KW_LIKE
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1574:86: -> ^( TOK_SHOWFUNCTIONS ( KW_LIKE )? ( showFunctionIdentifier )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1574:89: ^( TOK_SHOWFUNCTIONS ( KW_LIKE )? ( showFunctionIdentifier )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOWFUNCTIONS, "TOK_SHOWFUNCTIONS"), root_1);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1574:109: ( KW_LIKE )?
						if ( stream_KW_LIKE.hasNext() ) {
							adaptor.addChild(root_1, stream_KW_LIKE.nextNode());
						}
						stream_KW_LIKE.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1574:118: ( showFunctionIdentifier )?
						if ( stream_showFunctionIdentifier.hasNext() ) {
							adaptor.addChild(root_1, stream_showFunctionIdentifier.nextTree());
						}
						stream_showFunctionIdentifier.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 7 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1575:7: KW_SHOW KW_PARTITIONS tabName= tableName ( partitionSpec )?
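					// Explanatory note (not generated by ANTLR): this alternative parses
					// "SHOW PARTITIONS <table> [PARTITION (<spec>)]" and rewrites it to
					// ^(TOK_SHOWPARTITIONS $tabName partitionSpec?).
					// Illustrative HiveQL (names are made up):
					//   SHOW PARTITIONS page_view PARTITION (ds='2010-03-03');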
					{
					KW_SHOW469=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement7976); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW469);

					KW_PARTITIONS470=(Token)match(input,KW_PARTITIONS,FOLLOW_KW_PARTITIONS_in_showStatement7978); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_PARTITIONS.add(KW_PARTITIONS470);

					pushFollow(FOLLOW_tableName_in_showStatement7982);
					tabName=tableName();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableName.add(tabName.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1575:47: ( partitionSpec )?
					int alt141=2;
					int LA141_0 = input.LA(1);
					if ( (LA141_0==KW_PARTITION) ) {
						alt141=1;
					}
					switch (alt141) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1575:47: partitionSpec
							{
							pushFollow(FOLLOW_partitionSpec_in_showStatement7984);
							partitionSpec471=partitionSpec();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_partitionSpec.add(partitionSpec471.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: partitionSpec, tabName
					// token labels: 
					// rule labels: tabName, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_tabName=new RewriteRuleSubtreeStream(adaptor,"rule tabName",tabName!=null?tabName.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1575:62: -> ^( TOK_SHOWPARTITIONS $tabName ( partitionSpec )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1575:65: ^( TOK_SHOWPARTITIONS $tabName ( partitionSpec )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOWPARTITIONS, "TOK_SHOWPARTITIONS"), root_1);
						adaptor.addChild(root_1, stream_tabName.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1575:95: ( partitionSpec )?
						if ( stream_partitionSpec.hasNext() ) {
							adaptor.addChild(root_1, stream_partitionSpec.nextTree());
						}
						stream_partitionSpec.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 8 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1576:7: KW_SHOW KW_CREATE ( ( KW_DATABASE | KW_SCHEMA )=> ( KW_DATABASE | KW_SCHEMA ) db_name= identifier -> ^( TOK_SHOW_CREATEDATABASE $db_name) | KW_TABLE tabName= tableName -> ^( TOK_SHOW_CREATETABLE $tabName) )
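					// Explanatory note (not generated by ANTLR): this alternative parses
					// "SHOW CREATE (DATABASE|SCHEMA) <db>" or "SHOW CREATE TABLE <table>". The
					// (KW_DATABASE|KW_SCHEMA)=> syntactic predicate (synpred12_HiveParser in the lookahead
					// below) is what commits the parser to the database branch before consuming any tokens.
					// Illustrative HiveQL (names are made up): SHOW CREATE TABLE sales.orders;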
					{
					KW_SHOW472=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement8006); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW472);

					KW_CREATE473=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_showStatement8008); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_CREATE.add(KW_CREATE473);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1576:25: ( ( KW_DATABASE | KW_SCHEMA )=> ( KW_DATABASE | KW_SCHEMA ) db_name= identifier -> ^( TOK_SHOW_CREATEDATABASE $db_name) | KW_TABLE tabName= tableName -> ^( TOK_SHOW_CREATETABLE $tabName) )
					int alt143=2;
					int LA143_0 = input.LA(1);
					if ( (LA143_0==KW_DATABASE) && (synpred12_HiveParser())) {
						alt143=1;
					}
					else if ( (LA143_0==KW_SCHEMA) && (synpred12_HiveParser())) {
						alt143=1;
					}
					else if ( (LA143_0==KW_TABLE) ) {
						alt143=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						NoViableAltException nvae =
							new NoViableAltException("", 143, 0, input);
						throw nvae;
					}

					switch (alt143) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1577:9: ( KW_DATABASE | KW_SCHEMA )=> ( KW_DATABASE | KW_SCHEMA ) db_name= identifier
							{
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1577:36: ( KW_DATABASE | KW_SCHEMA )
							int alt142=2;
							int LA142_0 = input.LA(1);
							if ( (LA142_0==KW_DATABASE) ) {
								alt142=1;
							}
							else if ( (LA142_0==KW_SCHEMA) ) {
								alt142=2;
							}

							else {
								if (state.backtracking>0) {state.failed=true; return retval;}
								NoViableAltException nvae =
									new NoViableAltException("", 142, 0, input);
								throw nvae;
							}

							switch (alt142) {
								case 1 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1577:37: KW_DATABASE
									{
									KW_DATABASE474=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_showStatement8029); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_DATABASE.add(KW_DATABASE474);

									}
									break;
								case 2 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1577:49: KW_SCHEMA
									{
									KW_SCHEMA475=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_showStatement8031); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_SCHEMA.add(KW_SCHEMA475);

									}
									break;

							}

							pushFollow(FOLLOW_identifier_in_showStatement8036);
							db_name=identifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_identifier.add(db_name.getTree());
							// AST REWRITE
							// elements: db_name
							// token labels: 
							// rule labels: db_name, retval
							// token list labels: 
							// rule list labels: 
							// wildcard labels: 
							if ( state.backtracking==0 ) {
							retval.tree = root_0;
							RewriteRuleSubtreeStream stream_db_name=new RewriteRuleSubtreeStream(adaptor,"rule db_name",db_name!=null?db_name.getTree():null);
							RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

							root_0 = (ASTNode)adaptor.nil();
							// 1577:79: -> ^( TOK_SHOW_CREATEDATABASE $db_name)
							{
								// org/apache/hadoop/hive/ql/parse/HiveParser.g:1577:82: ^( TOK_SHOW_CREATEDATABASE $db_name)
								{
								ASTNode root_1 = (ASTNode)adaptor.nil();
								root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOW_CREATEDATABASE, "TOK_SHOW_CREATEDATABASE"), root_1);
								adaptor.addChild(root_1, stream_db_name.nextTree());
								adaptor.addChild(root_0, root_1);
								}

							}


							retval.tree = root_0;
							}

							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1579:9: KW_TABLE tabName= tableName
							{
							KW_TABLE476=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_showStatement8065); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE476);

							pushFollow(FOLLOW_tableName_in_showStatement8069);
							tabName=tableName();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tableName.add(tabName.getTree());
							// AST REWRITE
							// elements: tabName
							// token labels: 
							// rule labels: tabName, retval
							// token list labels: 
							// rule list labels: 
							// wildcard labels: 
							if ( state.backtracking==0 ) {
							retval.tree = root_0;
							RewriteRuleSubtreeStream stream_tabName=new RewriteRuleSubtreeStream(adaptor,"rule tabName",tabName!=null?tabName.getTree():null);
							RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

							root_0 = (ASTNode)adaptor.nil();
							// 1579:36: -> ^( TOK_SHOW_CREATETABLE $tabName)
							{
								// org/apache/hadoop/hive/ql/parse/HiveParser.g:1579:39: ^( TOK_SHOW_CREATETABLE $tabName)
								{
								ASTNode root_1 = (ASTNode)adaptor.nil();
								root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOW_CREATETABLE, "TOK_SHOW_CREATETABLE"), root_1);
								adaptor.addChild(root_1, stream_tabName.nextTree());
								adaptor.addChild(root_0, root_1);
								}

							}


							retval.tree = root_0;
							}

							}
							break;

					}

					}
					break;
				case 9 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1581:7: KW_SHOW KW_TABLE KW_EXTENDED ( ( KW_FROM | KW_IN ) db_name= identifier )? KW_LIKE showStmtIdentifier ( partitionSpec )?
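					// Explanatory note (not generated by ANTLR): this alternative parses
					// "SHOW TABLE EXTENDED [(FROM|IN) <db>] LIKE <pattern> [PARTITION (<spec>)]" and
					// rewrites it to ^(TOK_SHOW_TABLESTATUS showStmtIdentifier $db_name? partitionSpec?).
					// Illustrative HiveQL (names are made up): SHOW TABLE EXTENDED IN sales LIKE 'ord*';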
					{
					KW_SHOW477=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement8094); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW477);

					KW_TABLE478=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_showStatement8096); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE478);

					KW_EXTENDED479=(Token)match(input,KW_EXTENDED,FOLLOW_KW_EXTENDED_in_showStatement8098); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_EXTENDED.add(KW_EXTENDED479);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1581:36: ( ( KW_FROM | KW_IN ) db_name= identifier )?
					int alt145=2;
					int LA145_0 = input.LA(1);
					if ( (LA145_0==KW_FROM||LA145_0==KW_IN) ) {
						alt145=1;
					}
					switch (alt145) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1581:37: ( KW_FROM | KW_IN ) db_name= identifier
							{
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1581:37: ( KW_FROM | KW_IN )
							int alt144=2;
							int LA144_0 = input.LA(1);
							if ( (LA144_0==KW_FROM) ) {
								alt144=1;
							}
							else if ( (LA144_0==KW_IN) ) {
								alt144=2;
							}

							else {
								if (state.backtracking>0) {state.failed=true; return retval;}
								NoViableAltException nvae =
									new NoViableAltException("", 144, 0, input);
								throw nvae;
							}

							switch (alt144) {
								case 1 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1581:38: KW_FROM
									{
									KW_FROM480=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_showStatement8102); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_FROM.add(KW_FROM480);

									}
									break;
								case 2 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1581:46: KW_IN
									{
									KW_IN481=(Token)match(input,KW_IN,FOLLOW_KW_IN_in_showStatement8104); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_IN.add(KW_IN481);

									}
									break;

							}

							pushFollow(FOLLOW_identifier_in_showStatement8109);
							db_name=identifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_identifier.add(db_name.getTree());
							}
							break;

					}

					KW_LIKE482=(Token)match(input,KW_LIKE,FOLLOW_KW_LIKE_in_showStatement8113); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_LIKE.add(KW_LIKE482);

					pushFollow(FOLLOW_showStmtIdentifier_in_showStatement8115);
					showStmtIdentifier483=showStmtIdentifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_showStmtIdentifier.add(showStmtIdentifier483.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1581:101: ( partitionSpec )?
					int alt146=2;
					int LA146_0 = input.LA(1);
					if ( (LA146_0==KW_PARTITION) ) {
						alt146=1;
					}
					switch (alt146) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1581:101: partitionSpec
							{
							pushFollow(FOLLOW_partitionSpec_in_showStatement8117);
							partitionSpec484=partitionSpec();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_partitionSpec.add(partitionSpec484.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: partitionSpec, db_name, showStmtIdentifier
					// token labels: 
					// rule labels: db_name, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_db_name=new RewriteRuleSubtreeStream(adaptor,"rule db_name",db_name!=null?db_name.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1582:5: -> ^( TOK_SHOW_TABLESTATUS showStmtIdentifier ( $db_name)? ( partitionSpec )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1582:8: ^( TOK_SHOW_TABLESTATUS showStmtIdentifier ( $db_name)? ( partitionSpec )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOW_TABLESTATUS, "TOK_SHOW_TABLESTATUS"), root_1);
						adaptor.addChild(root_1, stream_showStmtIdentifier.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1582:51: ( $db_name)?
						if ( stream_db_name.hasNext() ) {
							adaptor.addChild(root_1, stream_db_name.nextTree());
						}
						stream_db_name.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1582:60: ( partitionSpec )?
						if ( stream_partitionSpec.hasNext() ) {
							adaptor.addChild(root_1, stream_partitionSpec.nextTree());
						}
						stream_partitionSpec.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 10 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1583:7: KW_SHOW KW_TBLPROPERTIES tableName ( LPAREN prptyName= StringLiteral RPAREN )?
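					// Explanatory note (not generated by ANTLR): this alternative parses
					// "SHOW TBLPROPERTIES <table> [('<property>')]" and rewrites it to
					// ^(TOK_SHOW_TBLPROPERTIES tableName $prptyName?).
					// Illustrative HiveQL (names are made up):
					//   SHOW TBLPROPERTIES orders ('last_modified_time');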
					{
					KW_SHOW485=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement8145); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW485);

					KW_TBLPROPERTIES486=(Token)match(input,KW_TBLPROPERTIES,FOLLOW_KW_TBLPROPERTIES_in_showStatement8147); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TBLPROPERTIES.add(KW_TBLPROPERTIES486);

					pushFollow(FOLLOW_tableName_in_showStatement8149);
					tableName487=tableName();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableName.add(tableName487.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1583:42: ( LPAREN prptyName= StringLiteral RPAREN )?
					int alt147=2;
					int LA147_0 = input.LA(1);
					if ( (LA147_0==LPAREN) ) {
						alt147=1;
					}
					switch (alt147) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1583:43: LPAREN prptyName= StringLiteral RPAREN
							{
							LPAREN488=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_showStatement8152); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN488);

							prptyName=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_showStatement8156); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_StringLiteral.add(prptyName);

							RPAREN489=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_showStatement8158); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN489);

							}
							break;

					}

					// AST REWRITE
					// elements: prptyName, tableName
					// token labels: prptyName
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleTokenStream stream_prptyName=new RewriteRuleTokenStream(adaptor,"token prptyName",prptyName);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1583:83: -> ^( TOK_SHOW_TBLPROPERTIES tableName ( $prptyName)? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1583:86: ^( TOK_SHOW_TBLPROPERTIES tableName ( $prptyName)? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOW_TBLPROPERTIES, "TOK_SHOW_TBLPROPERTIES"), root_1);
						adaptor.addChild(root_1, stream_tableName.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1583:122: ( $prptyName)?
						if ( stream_prptyName.hasNext() ) {
							adaptor.addChild(root_1, stream_prptyName.nextNode());
						}
						stream_prptyName.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 11 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1584:7: KW_SHOW KW_LOCKS ( ( KW_DATABASE | KW_SCHEMA )=> ( KW_DATABASE | KW_SCHEMA ) (dbName= identifier ) (isExtended= KW_EXTENDED )? -> ^( TOK_SHOWDBLOCKS $dbName ( $isExtended)? ) | (parttype= partTypeExpr )? (isExtended= KW_EXTENDED )? -> ^( TOK_SHOWLOCKS ( $parttype)? ( $isExtended)? ) )
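					// Explanatory note (not generated by ANTLR): this alternative parses
					// "SHOW LOCKS (DATABASE|SCHEMA) <db> [EXTENDED]" or "SHOW LOCKS [<partTypeExpr>] [EXTENDED]".
					// Because SCHEMA is a non-reserved keyword that could also begin a table identifier,
					// the lookahead below pairs LA(2)/LA(3) inspection with the synpred13_HiveParser
					// backtracking predicate to decide whether SCHEMA introduces a database name or is
					// itself the locked object's name.
					// Illustrative HiveQL (names are made up):
					//   SHOW LOCKS DATABASE sales EXTENDED;
					//   SHOW LOCKS orders PARTITION (ds='2010-03-03') EXTENDED;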
					{
					KW_SHOW490=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement8180); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW490);

					KW_LOCKS491=(Token)match(input,KW_LOCKS,FOLLOW_KW_LOCKS_in_showStatement8182); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_LOCKS.add(KW_LOCKS491);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1585:7: ( ( KW_DATABASE | KW_SCHEMA )=> ( KW_DATABASE | KW_SCHEMA ) (dbName= identifier ) (isExtended= KW_EXTENDED )? -> ^( TOK_SHOWDBLOCKS $dbName ( $isExtended)? ) | (parttype= partTypeExpr )? (isExtended= KW_EXTENDED )? -> ^( TOK_SHOWLOCKS ( $parttype)? ( $isExtended)? ) )
					int alt152=2;
					int LA152_0 = input.LA(1);
					if ( (LA152_0==KW_DATABASE) && (synpred13_HiveParser())) {
						alt152=1;
					}
					else if ( (LA152_0==KW_SCHEMA) ) {
						switch ( input.LA(2) ) {
						case Identifier:
							{
							int LA152_7 = input.LA(3);
							if ( (synpred13_HiveParser()) ) {
								alt152=1;
							}
							else if ( (true) ) {
								alt152=2;
							}

							}
							break;
						case KW_ABORT:
						case KW_ACTIVATE:
						case KW_ACTIVE:
						case KW_ADD:
						case KW_ADMIN:
						case KW_AFTER:
						case KW_ALLOC_FRACTION:
						case KW_ANALYZE:
						case KW_ARCHIVE:
						case KW_ASC:
						case KW_AUTOCOMMIT:
						case KW_BEFORE:
						case KW_BUCKET:
						case KW_BUCKETS:
						case KW_CACHE:
						case KW_CASCADE:
						case KW_CHANGE:
						case KW_CHECK:
						case KW_CLUSTER:
						case KW_CLUSTERED:
						case KW_CLUSTERSTATUS:
						case KW_COLLECTION:
						case KW_COLUMNS:
						case KW_COMMENT:
						case KW_COMPACT:
						case KW_COMPACTIONS:
						case KW_COMPUTE:
						case KW_CONCATENATE:
						case KW_CONTINUE:
						case KW_DATA:
						case KW_DATABASES:
						case KW_DATETIME:
						case KW_DAY:
						case KW_DBPROPERTIES:
						case KW_DEFAULT:
						case KW_DEFERRED:
						case KW_DEFINED:
						case KW_DELIMITED:
						case KW_DEPENDENCY:
						case KW_DESC:
						case KW_DETAIL:
						case KW_DIRECTORIES:
						case KW_DIRECTORY:
						case KW_DISABLE:
						case KW_DISTRIBUTE:
						case KW_DO:
						case KW_DOW:
						case KW_DUMP:
						case KW_ELEM_TYPE:
						case KW_ENABLE:
						case KW_ENFORCED:
						case KW_ESCAPED:
						case KW_EXCLUSIVE:
						case KW_EXPLAIN:
						case KW_EXPORT:
						case KW_EXPRESSION:
						case KW_FIELDS:
						case KW_FILE:
						case KW_FILEFORMAT:
						case KW_FIRST:
						case KW_FORMAT:
						case KW_FORMATTED:
						case KW_FUNCTIONS:
						case KW_HOUR:
						case KW_IDXPROPERTIES:
						case KW_INDEX:
						case KW_INDEXES:
						case KW_INPATH:
						case KW_INPUTDRIVER:
						case KW_INPUTFORMAT:
						case KW_ISOLATION:
						case KW_ITEMS:
						case KW_JAR:
						case KW_KEY:
						case KW_KEYS:
						case KW_KEY_TYPE:
						case KW_KILL:
						case KW_LAST:
						case KW_LEVEL:
						case KW_LIMIT:
						case KW_LINES:
						case KW_LOAD:
						case KW_LOCATION:
						case KW_LOCK:
						case KW_LOCKS:
						case KW_LOGICAL:
						case KW_LONG:
						case KW_MANAGEMENT:
						case KW_MAPJOIN:
						case KW_MAPPING:
						case KW_MATCHED:
						case KW_MATERIALIZED:
						case KW_METADATA:
						case KW_MINUTE:
						case KW_MONTH:
						case KW_MOVE:
						case KW_MSCK:
						case KW_NORELY:
						case KW_NOSCAN:
						case KW_NOVALIDATE:
						case KW_NULLS:
						case KW_OFFSET:
						case KW_OPERATOR:
						case KW_OPTION:
						case KW_OUTPUTDRIVER:
						case KW_OUTPUTFORMAT:
						case KW_OVERWRITE:
						case KW_OWNER:
						case KW_PARTITIONED:
						case KW_PARTITIONS:
						case KW_PATH:
						case KW_PLAN:
						case KW_PLANS:
						case KW_PLUS:
						case KW_POOL:
						case KW_PRINCIPALS:
						case KW_PURGE:
						case KW_QUARTER:
						case KW_QUERY:
						case KW_QUERY_PARALLELISM:
						case KW_READ:
						case KW_REBUILD:
						case KW_RECORDREADER:
						case KW_RECORDWRITER:
						case KW_RELOAD:
						case KW_RELY:
						case KW_RENAME:
						case KW_REOPTIMIZATION:
						case KW_REPAIR:
						case KW_REPL:
						case KW_REPLACE:
						case KW_REPLICATION:
						case KW_RESOURCE:
						case KW_RESTRICT:
						case KW_REWRITE:
						case KW_ROLE:
						case KW_ROLES:
						case KW_SCHEDULING_POLICY:
						case KW_SCHEMA:
						case KW_SCHEMAS:
						case KW_SECOND:
						case KW_SEMI:
						case KW_SERDE:
						case KW_SERDEPROPERTIES:
						case KW_SERVER:
						case KW_SETS:
						case KW_SHARED:
						case KW_SHOW:
						case KW_SHOW_DATABASE:
						case KW_SKEWED:
						case KW_SNAPSHOT:
						case KW_SORT:
						case KW_SORTED:
						case KW_SSL:
						case KW_STATISTICS:
						case KW_STATUS:
						case KW_STORED:
						case KW_STREAMTABLE:
						case KW_STRING:
						case KW_STRUCT:
						case KW_SUMMARY:
						case KW_TABLES:
						case KW_TBLPROPERTIES:
						case KW_TEMPORARY:
						case KW_TERMINATED:
						case KW_TINYINT:
						case KW_TOUCH:
						case KW_TRANSACTION:
						case KW_TRANSACTIONS:
						case KW_UNARCHIVE:
						case KW_UNDO:
						case KW_UNIONTYPE:
						case KW_UNLOCK:
						case KW_UNMANAGED:
						case KW_UNSET:
						case KW_UNSIGNED:
						case KW_URI:
						case KW_USE:
						case KW_UTC:
						case KW_UTCTIMESTAMP:
						case KW_VALIDATE:
						case KW_VALUE_TYPE:
						case KW_VECTORIZATION:
						case KW_VIEW:
						case KW_VIEWS:
						case KW_WAIT:
						case KW_WEEK:
						case KW_WHILE:
						case KW_WORK:
						case KW_WORKLOAD:
						case KW_WRITE:
						case KW_YEAR:
						case KW_ZONE:
						case KW_BATCH:
						case KW_DAYOFWEEK:
						case KW_HOLD_DDLTIME:
						case KW_IGNORE:
						case KW_NO_DROP:
						case KW_OFFLINE:
						case KW_PROTECTION:
						case KW_READONLY:
						case KW_TIMESTAMPTZ:
							{
							int LA152_8 = input.LA(3);
							if ( (synpred13_HiveParser()) ) {
								alt152=1;
							}
							else if ( (true) ) {
								alt152=2;
							}

							}
							break;
						case EOF:
						case DOT:
						case KW_EXTENDED:
						case KW_PARTITION:
							{
							alt152=2;
							}
							break;
						default:
							if (state.backtracking>0) {state.failed=true; return retval;}
							int nvaeMark = input.mark();
							try {
								input.consume();
								NoViableAltException nvae =
									new NoViableAltException("", 152, 2, input);
								throw nvae;
							} finally {
								input.rewind(nvaeMark);
							}
						}
					}
					else if ( (LA152_0==EOF||LA152_0==Identifier||(LA152_0 >= KW_ABORT && LA152_0 <= KW_AFTER)||LA152_0==KW_ALLOC_FRACTION||LA152_0==KW_ANALYZE||LA152_0==KW_ARCHIVE||LA152_0==KW_ASC||(LA152_0 >= KW_AUTOCOMMIT && LA152_0 <= KW_BEFORE)||(LA152_0 >= KW_BUCKET && LA152_0 <= KW_BUCKETS)||(LA152_0 >= KW_CACHE && LA152_0 <= KW_CASCADE)||LA152_0==KW_CHANGE||(LA152_0 >= KW_CHECK && LA152_0 <= KW_COLLECTION)||(LA152_0 >= KW_COLUMNS && LA152_0 <= KW_COMMENT)||(LA152_0 >= KW_COMPACT && LA152_0 <= KW_CONCATENATE)||LA152_0==KW_CONTINUE||LA152_0==KW_DATA||LA152_0==KW_DATABASES||(LA152_0 >= KW_DATETIME && LA152_0 <= KW_DBPROPERTIES)||(LA152_0 >= KW_DEFAULT && LA152_0 <= KW_DEFINED)||(LA152_0 >= KW_DELIMITED && LA152_0 <= KW_DESC)||(LA152_0 >= KW_DETAIL && LA152_0 <= KW_DISABLE)||(LA152_0 >= KW_DISTRIBUTE && LA152_0 <= KW_DO)||LA152_0==KW_DOW||(LA152_0 >= KW_DUMP && LA152_0 <= KW_ELEM_TYPE)||LA152_0==KW_ENABLE||(LA152_0 >= KW_ENFORCED && LA152_0 <= KW_ESCAPED)||LA152_0==KW_EXCLUSIVE||(LA152_0 >= KW_EXPLAIN && LA152_0 <= KW_EXTENDED)||(LA152_0 >= KW_FIELDS && LA152_0 <= KW_FIRST)||(LA152_0 >= KW_FORMAT && LA152_0 <= KW_FORMATTED)||LA152_0==KW_FUNCTIONS||(LA152_0 >= KW_HOUR && LA152_0 <= KW_IDXPROPERTIES)||(LA152_0 >= KW_INDEX && LA152_0 <= KW_INDEXES)||(LA152_0 >= KW_INPATH && LA152_0 <= KW_INPUTFORMAT)||(LA152_0 >= KW_ISOLATION && LA152_0 <= KW_JAR)||(LA152_0 >= KW_KEY && LA152_0 <= KW_LAST)||LA152_0==KW_LEVEL||(LA152_0 >= KW_LIMIT && LA152_0 <= KW_LOAD)||(LA152_0 >= KW_LOCATION && LA152_0 <= KW_LONG)||LA152_0==KW_MANAGEMENT||(LA152_0 >= KW_MAPJOIN && LA152_0 <= KW_MATERIALIZED)||LA152_0==KW_METADATA||(LA152_0 >= KW_MINUTE && LA152_0 <= KW_MONTH)||(LA152_0 >= KW_MOVE && LA152_0 <= KW_MSCK)||(LA152_0 >= KW_NORELY && LA152_0 <= KW_NOSCAN)||LA152_0==KW_NOVALIDATE||LA152_0==KW_NULLS||LA152_0==KW_OFFSET||(LA152_0 >= KW_OPERATOR && LA152_0 <= KW_OPTION)||(LA152_0 >= KW_OUTPUTDRIVER && LA152_0 <= KW_OUTPUTFORMAT)||(LA152_0 >= KW_OVERWRITE && LA152_0 <= KW_OWNER)||(LA152_0 >= KW_PARTITIONED && LA152_0 <= KW_PATH)||(LA152_0 >= KW_PLAN && LA152_0 <= KW_POOL)||LA152_0==KW_PRINCIPALS||(LA152_0 >= KW_PURGE && LA152_0 <= KW_QUERY_PARALLELISM)||LA152_0==KW_READ||(LA152_0 >= KW_REBUILD && LA152_0 <= KW_RECORDWRITER)||(LA152_0 >= KW_RELOAD && LA152_0 <= KW_RESTRICT)||LA152_0==KW_REWRITE||(LA152_0 >= KW_ROLE && LA152_0 <= KW_ROLES)||LA152_0==KW_SCHEDULING_POLICY||(LA152_0 >= KW_SCHEMAS && LA152_0 <= KW_SECOND)||(LA152_0 >= KW_SEMI && LA152_0 <= KW_SERVER)||(LA152_0 >= KW_SETS && LA152_0 <= KW_SKEWED)||(LA152_0 >= KW_SNAPSHOT && LA152_0 <= KW_SSL)||(LA152_0 >= KW_STATISTICS && LA152_0 <= KW_SUMMARY)||LA152_0==KW_TABLES||(LA152_0 >= KW_TBLPROPERTIES && LA152_0 <= KW_TERMINATED)||LA152_0==KW_TINYINT||(LA152_0 >= KW_TOUCH && LA152_0 <= KW_TRANSACTIONS)||LA152_0==KW_UNARCHIVE||LA152_0==KW_UNDO||LA152_0==KW_UNIONTYPE||(LA152_0 >= KW_UNLOCK && LA152_0 <= KW_UNSIGNED)||(LA152_0 >= KW_URI && LA152_0 <= KW_USE)||(LA152_0 >= KW_UTC && LA152_0 <= KW_VALIDATE)||LA152_0==KW_VALUE_TYPE||(LA152_0 >= KW_VECTORIZATION && LA152_0 <= KW_WEEK)||LA152_0==KW_WHILE||(LA152_0 >= KW_WORK && LA152_0 <= KW_ZONE)||LA152_0==KW_BATCH||LA152_0==KW_DAYOFWEEK||LA152_0==KW_HOLD_DDLTIME||LA152_0==KW_IGNORE||LA152_0==KW_NO_DROP||LA152_0==KW_OFFLINE||LA152_0==KW_PROTECTION||LA152_0==KW_READONLY||LA152_0==KW_TIMESTAMPTZ) ) {
						alt152=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						NoViableAltException nvae =
							new NoViableAltException("", 152, 0, input);
						throw nvae;
					}

					switch (alt152) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1586:7: ( KW_DATABASE | KW_SCHEMA )=> ( KW_DATABASE | KW_SCHEMA ) (dbName= identifier ) (isExtended= KW_EXTENDED )?
							{
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1586:34: ( KW_DATABASE | KW_SCHEMA )
							int alt148=2;
							int LA148_0 = input.LA(1);
							if ( (LA148_0==KW_DATABASE) ) {
								alt148=1;
							}
							else if ( (LA148_0==KW_SCHEMA) ) {
								alt148=2;
							}

							else {
								if (state.backtracking>0) {state.failed=true; return retval;}
								NoViableAltException nvae =
									new NoViableAltException("", 148, 0, input);
								throw nvae;
							}

							switch (alt148) {
								case 1 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1586:35: KW_DATABASE
									{
									KW_DATABASE492=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_showStatement8208); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_DATABASE.add(KW_DATABASE492);

									}
									break;
								case 2 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1586:47: KW_SCHEMA
									{
									KW_SCHEMA493=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_showStatement8210); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_SCHEMA.add(KW_SCHEMA493);

									}
									break;

							}

							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1586:58: (dbName= identifier )
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1586:59: dbName= identifier
							{
							pushFollow(FOLLOW_identifier_in_showStatement8216);
							dbName=identifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_identifier.add(dbName.getTree());
							}

							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1586:78: (isExtended= KW_EXTENDED )?
							int alt149=2;
							int LA149_0 = input.LA(1);
							if ( (LA149_0==KW_EXTENDED) ) {
								alt149=1;
							}
							switch (alt149) {
								case 1 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1586:79: isExtended= KW_EXTENDED
									{
									isExtended=(Token)match(input,KW_EXTENDED,FOLLOW_KW_EXTENDED_in_showStatement8222); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_EXTENDED.add(isExtended);

									}
									break;

							}

							// AST REWRITE
							// elements: dbName, isExtended
							// token labels: isExtended
							// rule labels: dbName, retval
							// token list labels: 
							// rule list labels: 
							// wildcard labels: 
							if ( state.backtracking==0 ) {
							retval.tree = root_0;
							RewriteRuleTokenStream stream_isExtended=new RewriteRuleTokenStream(adaptor,"token isExtended",isExtended);
							RewriteRuleSubtreeStream stream_dbName=new RewriteRuleSubtreeStream(adaptor,"rule dbName",dbName!=null?dbName.getTree():null);
							RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

							root_0 = (ASTNode)adaptor.nil();
							// 1586:104: -> ^( TOK_SHOWDBLOCKS $dbName ( $isExtended)? )
							{
								// org/apache/hadoop/hive/ql/parse/HiveParser.g:1586:107: ^( TOK_SHOWDBLOCKS $dbName ( $isExtended)? )
								{
								ASTNode root_1 = (ASTNode)adaptor.nil();
								root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOWDBLOCKS, "TOK_SHOWDBLOCKS"), root_1);
								adaptor.addChild(root_1, stream_dbName.nextTree());
								// org/apache/hadoop/hive/ql/parse/HiveParser.g:1586:134: ( $isExtended)?
								if ( stream_isExtended.hasNext() ) {
									adaptor.addChild(root_1, stream_isExtended.nextNode());
								}
								stream_isExtended.reset();

								adaptor.addChild(root_0, root_1);
								}

							}


							retval.tree = root_0;
							}

							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1588:7: (parttype= partTypeExpr )? (isExtended= KW_EXTENDED )?
							{
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1588:7: (parttype= partTypeExpr )?
							int alt150=2;
							int LA150_0 = input.LA(1);
							if ( (LA150_0==Identifier||(LA150_0 >= KW_ABORT && LA150_0 <= KW_AFTER)||LA150_0==KW_ALLOC_FRACTION||LA150_0==KW_ANALYZE||LA150_0==KW_ARCHIVE||LA150_0==KW_ASC||(LA150_0 >= KW_AUTOCOMMIT && LA150_0 <= KW_BEFORE)||(LA150_0 >= KW_BUCKET && LA150_0 <= KW_BUCKETS)||(LA150_0 >= KW_CACHE && LA150_0 <= KW_CASCADE)||LA150_0==KW_CHANGE||(LA150_0 >= KW_CHECK && LA150_0 <= KW_COLLECTION)||(LA150_0 >= KW_COLUMNS && LA150_0 <= KW_COMMENT)||(LA150_0 >= KW_COMPACT && LA150_0 <= KW_CONCATENATE)||LA150_0==KW_CONTINUE||LA150_0==KW_DATA||LA150_0==KW_DATABASES||(LA150_0 >= KW_DATETIME && LA150_0 <= KW_DBPROPERTIES)||(LA150_0 >= KW_DEFAULT && LA150_0 <= KW_DEFINED)||(LA150_0 >= KW_DELIMITED && LA150_0 <= KW_DESC)||(LA150_0 >= KW_DETAIL && LA150_0 <= KW_DISABLE)||(LA150_0 >= KW_DISTRIBUTE && LA150_0 <= KW_DO)||LA150_0==KW_DOW||(LA150_0 >= KW_DUMP && LA150_0 <= KW_ELEM_TYPE)||LA150_0==KW_ENABLE||(LA150_0 >= KW_ENFORCED && LA150_0 <= KW_ESCAPED)||LA150_0==KW_EXCLUSIVE||(LA150_0 >= KW_EXPLAIN && LA150_0 <= KW_EXPRESSION)||(LA150_0 >= KW_FIELDS && LA150_0 <= KW_FIRST)||(LA150_0 >= KW_FORMAT && LA150_0 <= KW_FORMATTED)||LA150_0==KW_FUNCTIONS||(LA150_0 >= KW_HOUR && LA150_0 <= KW_IDXPROPERTIES)||(LA150_0 >= KW_INDEX && LA150_0 <= KW_INDEXES)||(LA150_0 >= KW_INPATH && LA150_0 <= KW_INPUTFORMAT)||(LA150_0 >= KW_ISOLATION && LA150_0 <= KW_JAR)||(LA150_0 >= KW_KEY && LA150_0 <= KW_LAST)||LA150_0==KW_LEVEL||(LA150_0 >= KW_LIMIT && LA150_0 <= KW_LOAD)||(LA150_0 >= KW_LOCATION && LA150_0 <= KW_LONG)||LA150_0==KW_MANAGEMENT||(LA150_0 >= KW_MAPJOIN && LA150_0 <= KW_MATERIALIZED)||LA150_0==KW_METADATA||(LA150_0 >= KW_MINUTE && LA150_0 <= KW_MONTH)||(LA150_0 >= KW_MOVE && LA150_0 <= KW_MSCK)||(LA150_0 >= KW_NORELY && LA150_0 <= KW_NOSCAN)||LA150_0==KW_NOVALIDATE||LA150_0==KW_NULLS||LA150_0==KW_OFFSET||(LA150_0 >= KW_OPERATOR && LA150_0 <= KW_OPTION)||(LA150_0 >= KW_OUTPUTDRIVER && LA150_0 <= KW_OUTPUTFORMAT)||(LA150_0 >= KW_OVERWRITE && LA150_0 <= KW_OWNER)||(LA150_0 >= KW_PARTITIONED && LA150_0 <= KW_PATH)||(LA150_0 >= KW_PLAN && LA150_0 <= KW_POOL)||LA150_0==KW_PRINCIPALS||(LA150_0 >= KW_PURGE && LA150_0 <= KW_QUERY_PARALLELISM)||LA150_0==KW_READ||(LA150_0 >= KW_REBUILD && LA150_0 <= KW_RECORDWRITER)||(LA150_0 >= KW_RELOAD && LA150_0 <= KW_RESTRICT)||LA150_0==KW_REWRITE||(LA150_0 >= KW_ROLE && LA150_0 <= KW_ROLES)||(LA150_0 >= KW_SCHEDULING_POLICY && LA150_0 <= KW_SECOND)||(LA150_0 >= KW_SEMI && LA150_0 <= KW_SERVER)||(LA150_0 >= KW_SETS && LA150_0 <= KW_SKEWED)||(LA150_0 >= KW_SNAPSHOT && LA150_0 <= KW_SSL)||(LA150_0 >= KW_STATISTICS && LA150_0 <= KW_SUMMARY)||LA150_0==KW_TABLES||(LA150_0 >= KW_TBLPROPERTIES && LA150_0 <= KW_TERMINATED)||LA150_0==KW_TINYINT||(LA150_0 >= KW_TOUCH && LA150_0 <= KW_TRANSACTIONS)||LA150_0==KW_UNARCHIVE||LA150_0==KW_UNDO||LA150_0==KW_UNIONTYPE||(LA150_0 >= KW_UNLOCK && LA150_0 <= KW_UNSIGNED)||(LA150_0 >= KW_URI && LA150_0 <= KW_USE)||(LA150_0 >= KW_UTC && LA150_0 <= KW_VALIDATE)||LA150_0==KW_VALUE_TYPE||(LA150_0 >= KW_VECTORIZATION && LA150_0 <= KW_WEEK)||LA150_0==KW_WHILE||(LA150_0 >= KW_WORK && LA150_0 <= KW_ZONE)||LA150_0==KW_BATCH||LA150_0==KW_DAYOFWEEK||LA150_0==KW_HOLD_DDLTIME||LA150_0==KW_IGNORE||LA150_0==KW_NO_DROP||LA150_0==KW_OFFLINE||LA150_0==KW_PROTECTION||LA150_0==KW_READONLY||LA150_0==KW_TIMESTAMPTZ) ) {
								alt150=1;
							}
							switch (alt150) {
								case 1 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1588:8: parttype= partTypeExpr
									{
									pushFollow(FOLLOW_partTypeExpr_in_showStatement8256);
									parttype=partTypeExpr();
									state._fsp--;
									if (state.failed) return retval;
									if ( state.backtracking==0 ) stream_partTypeExpr.add(parttype.getTree());
									}
									break;

							}

							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1588:32: (isExtended= KW_EXTENDED )?
							int alt151=2;
							int LA151_0 = input.LA(1);
							if ( (LA151_0==KW_EXTENDED) ) {
								alt151=1;
							}
							switch (alt151) {
								case 1 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1588:33: isExtended= KW_EXTENDED
									{
									isExtended=(Token)match(input,KW_EXTENDED,FOLLOW_KW_EXTENDED_in_showStatement8263); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_EXTENDED.add(isExtended);

									}
									break;

							}

							// AST REWRITE
							// elements: parttype, isExtended
							// token labels: isExtended
							// rule labels: parttype, retval
							// token list labels: 
							// rule list labels: 
							// wildcard labels: 
							if ( state.backtracking==0 ) {
							retval.tree = root_0;
							RewriteRuleTokenStream stream_isExtended=new RewriteRuleTokenStream(adaptor,"token isExtended",isExtended);
							RewriteRuleSubtreeStream stream_parttype=new RewriteRuleSubtreeStream(adaptor,"rule parttype",parttype!=null?parttype.getTree():null);
							RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

							root_0 = (ASTNode)adaptor.nil();
							// 1588:58: -> ^( TOK_SHOWLOCKS ( $parttype)? ( $isExtended)? )
							{
								// org/apache/hadoop/hive/ql/parse/HiveParser.g:1588:61: ^( TOK_SHOWLOCKS ( $parttype)? ( $isExtended)? )
								{
								ASTNode root_1 = (ASTNode)adaptor.nil();
								root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOWLOCKS, "TOK_SHOWLOCKS"), root_1);
								// org/apache/hadoop/hive/ql/parse/HiveParser.g:1588:78: ( $parttype)?
								if ( stream_parttype.hasNext() ) {
									adaptor.addChild(root_1, stream_parttype.nextTree());
								}
								stream_parttype.reset();

								// org/apache/hadoop/hive/ql/parse/HiveParser.g:1588:89: ( $isExtended)?
								if ( stream_isExtended.hasNext() ) {
									adaptor.addChild(root_1, stream_isExtended.nextNode());
								}
								stream_isExtended.reset();

								adaptor.addChild(root_0, root_1);
								}

							}


							retval.tree = root_0;
							}

							}
							break;

					}

					}
					break;
				case 12 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1590:7: KW_SHOW KW_COMPACTIONS
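					// Explanatory note (not generated by ANTLR): this alternative parses the bare statement
					// "SHOW COMPACTIONS;" and rewrites it to the childless node ^(TOK_SHOW_COMPACTIONS).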
					{
					KW_SHOW494=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement8295); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW494);

					KW_COMPACTIONS495=(Token)match(input,KW_COMPACTIONS,FOLLOW_KW_COMPACTIONS_in_showStatement8297); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_COMPACTIONS.add(KW_COMPACTIONS495);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1590:30: -> ^( TOK_SHOW_COMPACTIONS )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1590:33: ^( TOK_SHOW_COMPACTIONS )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOW_COMPACTIONS, "TOK_SHOW_COMPACTIONS"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 13 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1591:7: KW_SHOW KW_TRANSACTIONS
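					// Explanatory note (not generated by ANTLR): this alternative parses the bare statement
					// "SHOW TRANSACTIONS;" and rewrites it to the childless node ^(TOK_SHOW_TRANSACTIONS).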
					{
					KW_SHOW496=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement8311); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW496);

					KW_TRANSACTIONS497=(Token)match(input,KW_TRANSACTIONS,FOLLOW_KW_TRANSACTIONS_in_showStatement8313); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TRANSACTIONS.add(KW_TRANSACTIONS497);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1591:31: -> ^( TOK_SHOW_TRANSACTIONS )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1591:34: ^( TOK_SHOW_TRANSACTIONS )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOW_TRANSACTIONS, "TOK_SHOW_TRANSACTIONS"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 14 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1592:7: KW_SHOW KW_CONF StringLiteral
					{
					KW_SHOW498=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement8327); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW498);

					KW_CONF499=(Token)match(input,KW_CONF,FOLLOW_KW_CONF_in_showStatement8329); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_CONF.add(KW_CONF499);

					StringLiteral500=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_showStatement8331); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(StringLiteral500);

					// AST REWRITE
					// elements: StringLiteral
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1592:37: -> ^( TOK_SHOWCONF StringLiteral )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1592:40: ^( TOK_SHOWCONF StringLiteral )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOWCONF, "TOK_SHOWCONF"), root_1);
						adaptor.addChild(root_1, stream_StringLiteral.nextNode());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 15 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1593:7: KW_SHOW KW_RESOURCE ( ( KW_PLAN rp_name= identifier -> ^( TOK_SHOW_RP $rp_name) ) | ( KW_PLANS -> ^( TOK_SHOW_RP ) ) )
					{
					KW_SHOW501=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement8347); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW501);

					KW_RESOURCE502=(Token)match(input,KW_RESOURCE,FOLLOW_KW_RESOURCE_in_showStatement8349); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_RESOURCE.add(KW_RESOURCE502);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1594:7: ( ( KW_PLAN rp_name= identifier -> ^( TOK_SHOW_RP $rp_name) ) | ( KW_PLANS -> ^( TOK_SHOW_RP ) ) )
					int alt153=2;
					int LA153_0 = input.LA(1);
					if ( (LA153_0==KW_PLAN) ) {
						alt153=1;
					}
					else if ( (LA153_0==KW_PLANS) ) {
						alt153=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						NoViableAltException nvae =
							new NoViableAltException("", 153, 0, input);
						throw nvae;
					}

					switch (alt153) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1595:9: ( KW_PLAN rp_name= identifier -> ^( TOK_SHOW_RP $rp_name) )
							{
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1595:9: ( KW_PLAN rp_name= identifier -> ^( TOK_SHOW_RP $rp_name) )
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1595:10: KW_PLAN rp_name= identifier
							{
							KW_PLAN503=(Token)match(input,KW_PLAN,FOLLOW_KW_PLAN_in_showStatement8368); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_PLAN.add(KW_PLAN503);

							pushFollow(FOLLOW_identifier_in_showStatement8372);
							rp_name=identifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_identifier.add(rp_name.getTree());
							// AST REWRITE
							// elements: rp_name
							// token labels: 
							// rule labels: retval, rp_name
							// token list labels: 
							// rule list labels: 
							// wildcard labels: 
							if ( state.backtracking==0 ) {
							retval.tree = root_0;
							RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
							RewriteRuleSubtreeStream stream_rp_name=new RewriteRuleSubtreeStream(adaptor,"rule rp_name",rp_name!=null?rp_name.getTree():null);

							root_0 = (ASTNode)adaptor.nil();
							// 1595:37: -> ^( TOK_SHOW_RP $rp_name)
							{
								// org/apache/hadoop/hive/ql/parse/HiveParser.g:1595:40: ^( TOK_SHOW_RP $rp_name)
								{
								ASTNode root_1 = (ASTNode)adaptor.nil();
								root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOW_RP, "TOK_SHOW_RP"), root_1);
								adaptor.addChild(root_1, stream_rp_name.nextTree());
								adaptor.addChild(root_0, root_1);
								}

							}


							retval.tree = root_0;
							}

							}

							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1596:11: ( KW_PLANS -> ^( TOK_SHOW_RP ) )
							{
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1596:11: ( KW_PLANS -> ^( TOK_SHOW_RP ) )
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1596:12: KW_PLANS
							{
							KW_PLANS504=(Token)match(input,KW_PLANS,FOLLOW_KW_PLANS_in_showStatement8395); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_PLANS.add(KW_PLANS504);

							// AST REWRITE
							// elements: 
							// token labels: 
							// rule labels: retval
							// token list labels: 
							// rule list labels: 
							// wildcard labels: 
							if ( state.backtracking==0 ) {
							retval.tree = root_0;
							RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

							root_0 = (ASTNode)adaptor.nil();
							// 1596:21: -> ^( TOK_SHOW_RP )
							{
								// org/apache/hadoop/hive/ql/parse/HiveParser.g:1596:24: ^( TOK_SHOW_RP )
								{
								ASTNode root_1 = (ASTNode)adaptor.nil();
								root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOW_RP, "TOK_SHOW_RP"), root_1);
								adaptor.addChild(root_0, root_1);
								}

							}


							retval.tree = root_0;
							}

							}

							}
							break;

					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "showStatement"


	public static class lockStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};
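	// Each rule returns a small scope object like lockStatement_return above; callers read
	// the rewritten AST through getTree(). A minimal usage sketch, assuming an already
	// configured HiveParser instance named 'parser' (hypothetical variable name):
	//   HiveParser.lockStatement_return r = parser.lockStatement(); // may throw RecognitionException
	//   ASTNode ast = r.getTree(); // ^(TOK_LOCKTABLE tableName lockMode partitionSpec?)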


	// $ANTLR start "lockStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1600:1: lockStatement : KW_LOCK KW_TABLE tableName ( partitionSpec )? lockMode -> ^( TOK_LOCKTABLE tableName lockMode ( partitionSpec )? ) ;
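	// Illustrative input (table name and partition values are placeholders):
	//   LOCK TABLE t PARTITION (ds='2018-05-18') SHARED
	// Note the rewrite reorders children: lockMode precedes the optional partitionSpec
	// under TOK_LOCKTABLE, even though it follows it in the source text.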
	public final HiveParser.lockStatement_return lockStatement() throws RecognitionException {
		HiveParser.lockStatement_return retval = new HiveParser.lockStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_LOCK505=null;
		Token KW_TABLE506=null;
		ParserRuleReturnScope tableName507 =null;
		ParserRuleReturnScope partitionSpec508 =null;
		ParserRuleReturnScope lockMode509 =null;

		ASTNode KW_LOCK505_tree=null;
		ASTNode KW_TABLE506_tree=null;
		RewriteRuleTokenStream stream_KW_LOCK=new RewriteRuleTokenStream(adaptor,"token KW_LOCK");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleSubtreeStream stream_lockMode=new RewriteRuleSubtreeStream(adaptor,"rule lockMode");
		RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		 pushMsg("lock statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1603:5: ( KW_LOCK KW_TABLE tableName ( partitionSpec )? lockMode -> ^( TOK_LOCKTABLE tableName lockMode ( partitionSpec )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1603:7: KW_LOCK KW_TABLE tableName ( partitionSpec )? lockMode
			{
			KW_LOCK505=(Token)match(input,KW_LOCK,FOLLOW_KW_LOCK_in_lockStatement8437); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_LOCK.add(KW_LOCK505);

			KW_TABLE506=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_lockStatement8439); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE506);

			pushFollow(FOLLOW_tableName_in_lockStatement8441);
			tableName507=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(tableName507.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1603:34: ( partitionSpec )?
			int alt155=2;
			int LA155_0 = input.LA(1);
			if ( (LA155_0==KW_PARTITION) ) {
				alt155=1;
			}
			switch (alt155) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1603:34: partitionSpec
					{
					pushFollow(FOLLOW_partitionSpec_in_lockStatement8443);
					partitionSpec508=partitionSpec();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_partitionSpec.add(partitionSpec508.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_lockMode_in_lockStatement8446);
			lockMode509=lockMode();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_lockMode.add(lockMode509.getTree());
			// AST REWRITE
			// elements: tableName, partitionSpec, lockMode
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1603:58: -> ^( TOK_LOCKTABLE tableName lockMode ( partitionSpec )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1603:61: ^( TOK_LOCKTABLE tableName lockMode ( partitionSpec )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_LOCKTABLE, "TOK_LOCKTABLE"), root_1);
				adaptor.addChild(root_1, stream_tableName.nextTree());
				adaptor.addChild(root_1, stream_lockMode.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1603:96: ( partitionSpec )?
				if ( stream_partitionSpec.hasNext() ) {
					adaptor.addChild(root_1, stream_partitionSpec.nextTree());
				}
				stream_partitionSpec.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "lockStatement"


	public static class lockDatabase_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "lockDatabase"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1606:1: lockDatabase : KW_LOCK ( KW_DATABASE | KW_SCHEMA ) (dbName= identifier ) lockMode -> ^( TOK_LOCKDB $dbName lockMode ) ;
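	// Illustrative inputs (database name is a placeholder): LOCK DATABASE db SHARED
	// or LOCK SCHEMA db EXCLUSIVE; DATABASE and SCHEMA are interchangeable here, and
	// neither keyword survives into the ^(TOK_LOCKDB $dbName lockMode) rewrite.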
	public final HiveParser.lockDatabase_return lockDatabase() throws RecognitionException {
		HiveParser.lockDatabase_return retval = new HiveParser.lockDatabase_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_LOCK510=null;
		Token KW_DATABASE511=null;
		Token KW_SCHEMA512=null;
		ParserRuleReturnScope dbName =null;
		ParserRuleReturnScope lockMode513 =null;

		ASTNode KW_LOCK510_tree=null;
		ASTNode KW_DATABASE511_tree=null;
		ASTNode KW_SCHEMA512_tree=null;
		RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
		RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
		RewriteRuleTokenStream stream_KW_LOCK=new RewriteRuleTokenStream(adaptor,"token KW_LOCK");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_lockMode=new RewriteRuleSubtreeStream(adaptor,"rule lockMode");

		 pushMsg("lock database statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1609:5: ( KW_LOCK ( KW_DATABASE | KW_SCHEMA ) (dbName= identifier ) lockMode -> ^( TOK_LOCKDB $dbName lockMode ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1609:7: KW_LOCK ( KW_DATABASE | KW_SCHEMA ) (dbName= identifier ) lockMode
			{
			KW_LOCK510=(Token)match(input,KW_LOCK,FOLLOW_KW_LOCK_in_lockDatabase8486); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_LOCK.add(KW_LOCK510);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1609:15: ( KW_DATABASE | KW_SCHEMA )
			int alt156=2;
			int LA156_0 = input.LA(1);
			if ( (LA156_0==KW_DATABASE) ) {
				alt156=1;
			}
			else if ( (LA156_0==KW_SCHEMA) ) {
				alt156=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 156, 0, input);
				throw nvae;
			}

			switch (alt156) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1609:16: KW_DATABASE
					{
					KW_DATABASE511=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_lockDatabase8489); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DATABASE.add(KW_DATABASE511);

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1609:28: KW_SCHEMA
					{
					KW_SCHEMA512=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_lockDatabase8491); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SCHEMA.add(KW_SCHEMA512);

					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1609:39: (dbName= identifier )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1609:40: dbName= identifier
			{
			pushFollow(FOLLOW_identifier_in_lockDatabase8497);
			dbName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(dbName.getTree());
			}

			pushFollow(FOLLOW_lockMode_in_lockDatabase8500);
			lockMode513=lockMode();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_lockMode.add(lockMode513.getTree());
			// AST REWRITE
			// elements: lockMode, dbName
			// token labels: 
			// rule labels: dbName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_dbName=new RewriteRuleSubtreeStream(adaptor,"rule dbName",dbName!=null?dbName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1609:68: -> ^( TOK_LOCKDB $dbName lockMode )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1609:71: ^( TOK_LOCKDB $dbName lockMode )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_LOCKDB, "TOK_LOCKDB"), root_1);
				adaptor.addChild(root_1, stream_dbName.nextTree());
				adaptor.addChild(root_1, stream_lockMode.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "lockDatabase"


	public static class lockMode_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "lockMode"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1612:1: lockMode : ( KW_SHARED | KW_EXCLUSIVE );
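	// lockMode is a plain token-set match: the single SHARED or EXCLUSIVE keyword is
	// consumed and added directly as the node, with no rewrite rule involved.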
	public final HiveParser.lockMode_return lockMode() throws RecognitionException {
		HiveParser.lockMode_return retval = new HiveParser.lockMode_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token set514=null;

		ASTNode set514_tree=null;

		 pushMsg("lock mode", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1615:5: ( KW_SHARED | KW_EXCLUSIVE )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:
			{
			root_0 = (ASTNode)adaptor.nil();


			set514=input.LT(1);
			if ( input.LA(1)==KW_EXCLUSIVE||input.LA(1)==KW_SHARED ) {
				input.consume();
				if ( state.backtracking==0 ) adaptor.addChild(root_0, (ASTNode)adaptor.create(set514));
				state.errorRecovery=false;
				state.failed=false;
			}
			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				MismatchedSetException mse = new MismatchedSetException(null,input);
				throw mse;
			}
			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "lockMode"


	public static class unlockStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "unlockStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1618:1: unlockStatement : KW_UNLOCK KW_TABLE tableName ( partitionSpec )? -> ^( TOK_UNLOCKTABLE tableName ( partitionSpec )? ) ;
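	// Illustrative input (placeholders): UNLOCK TABLE t PARTITION (ds='2018-05-18'),
	// rewritten to ^(TOK_UNLOCKTABLE tableName partitionSpec?).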
	public final HiveParser.unlockStatement_return unlockStatement() throws RecognitionException {
		HiveParser.unlockStatement_return retval = new HiveParser.unlockStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_UNLOCK515=null;
		Token KW_TABLE516=null;
		ParserRuleReturnScope tableName517 =null;
		ParserRuleReturnScope partitionSpec518 =null;

		ASTNode KW_UNLOCK515_tree=null;
		ASTNode KW_TABLE516_tree=null;
		RewriteRuleTokenStream stream_KW_UNLOCK=new RewriteRuleTokenStream(adaptor,"token KW_UNLOCK");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		 pushMsg("unlock statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1621:5: ( KW_UNLOCK KW_TABLE tableName ( partitionSpec )? -> ^( TOK_UNLOCKTABLE tableName ( partitionSpec )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1621:7: KW_UNLOCK KW_TABLE tableName ( partitionSpec )?
			{
			KW_UNLOCK515=(Token)match(input,KW_UNLOCK,FOLLOW_KW_UNLOCK_in_unlockStatement8569); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_UNLOCK.add(KW_UNLOCK515);

			KW_TABLE516=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_unlockStatement8571); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE516);

			pushFollow(FOLLOW_tableName_in_unlockStatement8573);
			tableName517=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(tableName517.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1621:36: ( partitionSpec )?
			int alt157=2;
			int LA157_0 = input.LA(1);
			if ( (LA157_0==KW_PARTITION) ) {
				alt157=1;
			}
			switch (alt157) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1621:36: partitionSpec
					{
					pushFollow(FOLLOW_partitionSpec_in_unlockStatement8575);
					partitionSpec518=partitionSpec();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_partitionSpec.add(partitionSpec518.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: tableName, partitionSpec
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1621:52: -> ^( TOK_UNLOCKTABLE tableName ( partitionSpec )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1621:55: ^( TOK_UNLOCKTABLE tableName ( partitionSpec )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_UNLOCKTABLE, "TOK_UNLOCKTABLE"), root_1);
				adaptor.addChild(root_1, stream_tableName.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1621:83: ( partitionSpec )?
				if ( stream_partitionSpec.hasNext() ) {
					adaptor.addChild(root_1, stream_partitionSpec.nextTree());
				}
				stream_partitionSpec.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "unlockStatement"


	public static class unlockDatabase_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "unlockDatabase"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1624:1: unlockDatabase : KW_UNLOCK ( KW_DATABASE | KW_SCHEMA ) (dbName= identifier ) -> ^( TOK_UNLOCKDB $dbName) ;
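	// Illustrative inputs (placeholder name): UNLOCK DATABASE db or UNLOCK SCHEMA db,
	// both rewritten to ^(TOK_UNLOCKDB $dbName).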
	public final HiveParser.unlockDatabase_return unlockDatabase() throws RecognitionException {
		HiveParser.unlockDatabase_return retval = new HiveParser.unlockDatabase_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_UNLOCK519=null;
		Token KW_DATABASE520=null;
		Token KW_SCHEMA521=null;
		ParserRuleReturnScope dbName =null;

		ASTNode KW_UNLOCK519_tree=null;
		ASTNode KW_DATABASE520_tree=null;
		ASTNode KW_SCHEMA521_tree=null;
		RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
		RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
		RewriteRuleTokenStream stream_KW_UNLOCK=new RewriteRuleTokenStream(adaptor,"token KW_UNLOCK");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");

		 pushMsg("unlock database statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1627:5: ( KW_UNLOCK ( KW_DATABASE | KW_SCHEMA ) (dbName= identifier ) -> ^( TOK_UNLOCKDB $dbName) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1627:7: KW_UNLOCK ( KW_DATABASE | KW_SCHEMA ) (dbName= identifier )
			{
			KW_UNLOCK519=(Token)match(input,KW_UNLOCK,FOLLOW_KW_UNLOCK_in_unlockDatabase8615); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_UNLOCK.add(KW_UNLOCK519);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1627:17: ( KW_DATABASE | KW_SCHEMA )
			int alt158=2;
			int LA158_0 = input.LA(1);
			if ( (LA158_0==KW_DATABASE) ) {
				alt158=1;
			}
			else if ( (LA158_0==KW_SCHEMA) ) {
				alt158=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 158, 0, input);
				throw nvae;
			}

			switch (alt158) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1627:18: KW_DATABASE
					{
					KW_DATABASE520=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_unlockDatabase8618); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DATABASE.add(KW_DATABASE520);

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1627:30: KW_SCHEMA
					{
					KW_SCHEMA521=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_unlockDatabase8620); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SCHEMA.add(KW_SCHEMA521);

					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1627:41: (dbName= identifier )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1627:42: dbName= identifier
			{
			pushFollow(FOLLOW_identifier_in_unlockDatabase8626);
			dbName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(dbName.getTree());
			}

			// AST REWRITE
			// elements: dbName
			// token labels: 
			// rule labels: dbName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_dbName=new RewriteRuleSubtreeStream(adaptor,"rule dbName",dbName!=null?dbName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1627:61: -> ^( TOK_UNLOCKDB $dbName)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1627:64: ^( TOK_UNLOCKDB $dbName)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_UNLOCKDB, "TOK_UNLOCKDB"), root_1);
				adaptor.addChild(root_1, stream_dbName.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "unlockDatabase"


	public static class createRoleStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "createRoleStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1630:1: createRoleStatement : KW_CREATE KW_ROLE roleName= identifier -> ^( TOK_CREATEROLE $roleName) ;
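	// Illustrative input (role name is a placeholder): CREATE ROLE analyst,
	// rewritten to ^(TOK_CREATEROLE $roleName).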
	public final HiveParser.createRoleStatement_return createRoleStatement() throws RecognitionException {
		HiveParser.createRoleStatement_return retval = new HiveParser.createRoleStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_CREATE522=null;
		Token KW_ROLE523=null;
		ParserRuleReturnScope roleName =null;

		ASTNode KW_CREATE522_tree=null;
		ASTNode KW_ROLE523_tree=null;
		RewriteRuleTokenStream stream_KW_ROLE=new RewriteRuleTokenStream(adaptor,"token KW_ROLE");
		RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");

		 pushMsg("create role", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1633:5: ( KW_CREATE KW_ROLE roleName= identifier -> ^( TOK_CREATEROLE $roleName) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1633:7: KW_CREATE KW_ROLE roleName= identifier
			{
			KW_CREATE522=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_createRoleStatement8663); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_CREATE.add(KW_CREATE522);

			KW_ROLE523=(Token)match(input,KW_ROLE,FOLLOW_KW_ROLE_in_createRoleStatement8665); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ROLE.add(KW_ROLE523);

			pushFollow(FOLLOW_identifier_in_createRoleStatement8669);
			roleName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(roleName.getTree());
			// AST REWRITE
			// elements: roleName
			// token labels: 
			// rule labels: roleName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_roleName=new RewriteRuleSubtreeStream(adaptor,"rule roleName",roleName!=null?roleName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1634:5: -> ^( TOK_CREATEROLE $roleName)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1634:8: ^( TOK_CREATEROLE $roleName)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CREATEROLE, "TOK_CREATEROLE"), root_1);
				adaptor.addChild(root_1, stream_roleName.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "createRoleStatement"


	public static class dropRoleStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "dropRoleStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1637:1: dropRoleStatement : KW_DROP KW_ROLE roleName= identifier -> ^( TOK_DROPROLE $roleName) ;
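	// Illustrative input (placeholder role name): DROP ROLE analyst,
	// rewritten to ^(TOK_DROPROLE $roleName).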
	public final HiveParser.dropRoleStatement_return dropRoleStatement() throws RecognitionException {
		HiveParser.dropRoleStatement_return retval = new HiveParser.dropRoleStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_DROP524=null;
		Token KW_ROLE525=null;
		ParserRuleReturnScope roleName =null;

		ASTNode KW_DROP524_tree=null;
		ASTNode KW_ROLE525_tree=null;
		RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
		RewriteRuleTokenStream stream_KW_ROLE=new RewriteRuleTokenStream(adaptor,"token KW_ROLE");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");

		pushMsg("drop role", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1640:5: ( KW_DROP KW_ROLE roleName= identifier -> ^( TOK_DROPROLE $roleName) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1640:7: KW_DROP KW_ROLE roleName= identifier
			{
			KW_DROP524=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_dropRoleStatement8709); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DROP.add(KW_DROP524);

			KW_ROLE525=(Token)match(input,KW_ROLE,FOLLOW_KW_ROLE_in_dropRoleStatement8711); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ROLE.add(KW_ROLE525);

			pushFollow(FOLLOW_identifier_in_dropRoleStatement8715);
			roleName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(roleName.getTree());
			// AST REWRITE
			// elements: roleName
			// token labels: 
			// rule labels: roleName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_roleName=new RewriteRuleSubtreeStream(adaptor,"rule roleName",roleName!=null?roleName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1641:5: -> ^( TOK_DROPROLE $roleName)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1641:8: ^( TOK_DROPROLE $roleName)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DROPROLE, "TOK_DROPROLE"), root_1);
				adaptor.addChild(root_1, stream_roleName.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "dropRoleStatement"


	public static class grantPrivileges_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "grantPrivileges"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1644:1: grantPrivileges : KW_GRANT privList= privilegeList ( privilegeObject )? KW_TO principalSpecification ( withGrantOption )? -> ^( TOK_GRANT $privList principalSpecification ( privilegeObject )? ( withGrantOption )? ) ;
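	// Illustrative input (object and principal names are placeholders):
	//   GRANT SELECT ON TABLE t TO USER hive_user WITH GRANT OPTION
	// The rewrite reorders children: ^(TOK_GRANT $privList principalSpecification
	// privilegeObject? withGrantOption?) places the principal before the object.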
	public final HiveParser.grantPrivileges_return grantPrivileges() throws RecognitionException {
		HiveParser.grantPrivileges_return retval = new HiveParser.grantPrivileges_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_GRANT526=null;
		Token KW_TO528=null;
		ParserRuleReturnScope privList =null;
		ParserRuleReturnScope privilegeObject527 =null;
		ParserRuleReturnScope principalSpecification529 =null;
		ParserRuleReturnScope withGrantOption530 =null;

		ASTNode KW_GRANT526_tree=null;
		ASTNode KW_TO528_tree=null;
		RewriteRuleTokenStream stream_KW_TO=new RewriteRuleTokenStream(adaptor,"token KW_TO");
		RewriteRuleTokenStream stream_KW_GRANT=new RewriteRuleTokenStream(adaptor,"token KW_GRANT");
		RewriteRuleSubtreeStream stream_withGrantOption=new RewriteRuleSubtreeStream(adaptor,"rule withGrantOption");
		RewriteRuleSubtreeStream stream_privilegeList=new RewriteRuleSubtreeStream(adaptor,"rule privilegeList");
		RewriteRuleSubtreeStream stream_privilegeObject=new RewriteRuleSubtreeStream(adaptor,"rule privilegeObject");
		RewriteRuleSubtreeStream stream_principalSpecification=new RewriteRuleSubtreeStream(adaptor,"rule principalSpecification");

		pushMsg("grant privileges", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1647:5: ( KW_GRANT privList= privilegeList ( privilegeObject )? KW_TO principalSpecification ( withGrantOption )? -> ^( TOK_GRANT $privList principalSpecification ( privilegeObject )? ( withGrantOption )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1647:7: KW_GRANT privList= privilegeList ( privilegeObject )? KW_TO principalSpecification ( withGrantOption )?
			{
			KW_GRANT526=(Token)match(input,KW_GRANT,FOLLOW_KW_GRANT_in_grantPrivileges8755); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_GRANT.add(KW_GRANT526);

			pushFollow(FOLLOW_privilegeList_in_grantPrivileges8759);
			privList=privilegeList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_privilegeList.add(privList.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1648:7: ( privilegeObject )?
			int alt159=2;
			int LA159_0 = input.LA(1);
			if ( (LA159_0==KW_ON) ) {
				alt159=1;
			}
			switch (alt159) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1648:7: privilegeObject
					{
					pushFollow(FOLLOW_privilegeObject_in_grantPrivileges8767);
					privilegeObject527=privilegeObject();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_privilegeObject.add(privilegeObject527.getTree());
					}
					break;

			}

			KW_TO528=(Token)match(input,KW_TO,FOLLOW_KW_TO_in_grantPrivileges8776); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TO.add(KW_TO528);

			pushFollow(FOLLOW_principalSpecification_in_grantPrivileges8778);
			principalSpecification529=principalSpecification();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_principalSpecification.add(principalSpecification529.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1650:7: ( withGrantOption )?
			int alt160=2;
			int LA160_0 = input.LA(1);
			if ( (LA160_0==KW_WITH) ) {
				alt160=1;
			}
			switch (alt160) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1650:7: withGrantOption
					{
					pushFollow(FOLLOW_withGrantOption_in_grantPrivileges8786);
					withGrantOption530=withGrantOption();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_withGrantOption.add(withGrantOption530.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: privList, withGrantOption, privilegeObject, principalSpecification
			// token labels: 
			// rule labels: privList, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_privList=new RewriteRuleSubtreeStream(adaptor,"rule privList",privList!=null?privList.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1651:5: -> ^( TOK_GRANT $privList principalSpecification ( privilegeObject )? ( withGrantOption )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1651:8: ^( TOK_GRANT $privList principalSpecification ( privilegeObject )? ( withGrantOption )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_GRANT, "TOK_GRANT"), root_1);
				adaptor.addChild(root_1, stream_privList.nextTree());
				adaptor.addChild(root_1, stream_principalSpecification.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1651:53: ( privilegeObject )?
				if ( stream_privilegeObject.hasNext() ) {
					adaptor.addChild(root_1, stream_privilegeObject.nextTree());
				}
				stream_privilegeObject.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1651:70: ( withGrantOption )?
				if ( stream_withGrantOption.hasNext() ) {
					adaptor.addChild(root_1, stream_withGrantOption.nextTree());
				}
				stream_withGrantOption.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "grantPrivileges"


	public static class revokePrivileges_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "revokePrivileges"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1654:1: revokePrivileges : KW_REVOKE ( grantOptionFor )? privilegeList ( privilegeObject )? KW_FROM principalSpecification -> ^( TOK_REVOKE privilegeList principalSpecification ( privilegeObject )? ( grantOptionFor )? ) ;
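	// Illustrative input (placeholders): REVOKE GRANT OPTION FOR SELECT ON TABLE t
	// FROM USER hive_user; as with grantPrivileges, the rewrite places the principal
	// before the optional object and option subtrees.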
	public final HiveParser.revokePrivileges_return revokePrivileges() throws RecognitionException {
		HiveParser.revokePrivileges_return retval = new HiveParser.revokePrivileges_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_REVOKE531=null;
		Token KW_FROM535=null;
		ParserRuleReturnScope grantOptionFor532 =null;
		ParserRuleReturnScope privilegeList533 =null;
		ParserRuleReturnScope privilegeObject534 =null;
		ParserRuleReturnScope principalSpecification536 =null;

		ASTNode KW_REVOKE531_tree=null;
		ASTNode KW_FROM535_tree=null;
		RewriteRuleTokenStream stream_KW_FROM=new RewriteRuleTokenStream(adaptor,"token KW_FROM");
		RewriteRuleTokenStream stream_KW_REVOKE=new RewriteRuleTokenStream(adaptor,"token KW_REVOKE");
		RewriteRuleSubtreeStream stream_grantOptionFor=new RewriteRuleSubtreeStream(adaptor,"rule grantOptionFor");
		RewriteRuleSubtreeStream stream_privilegeList=new RewriteRuleSubtreeStream(adaptor,"rule privilegeList");
		RewriteRuleSubtreeStream stream_privilegeObject=new RewriteRuleSubtreeStream(adaptor,"rule privilegeObject");
		RewriteRuleSubtreeStream stream_principalSpecification=new RewriteRuleSubtreeStream(adaptor,"rule principalSpecification");

		pushMsg("revoke privileges", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1657:5: ( KW_REVOKE ( grantOptionFor )? privilegeList ( privilegeObject )? KW_FROM principalSpecification -> ^( TOK_REVOKE privilegeList principalSpecification ( privilegeObject )? ( grantOptionFor )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1657:7: KW_REVOKE ( grantOptionFor )? privilegeList ( privilegeObject )? KW_FROM principalSpecification
			{
			KW_REVOKE531=(Token)match(input,KW_REVOKE,FOLLOW_KW_REVOKE_in_revokePrivileges8835); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_REVOKE.add(KW_REVOKE531);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1657:17: ( grantOptionFor )?
			int alt161=2;
			int LA161_0 = input.LA(1);
			if ( (LA161_0==KW_GRANT) ) {
				alt161=1;
			}
			switch (alt161) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1657:17: grantOptionFor
					{
					pushFollow(FOLLOW_grantOptionFor_in_revokePrivileges8837);
					grantOptionFor532=grantOptionFor();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_grantOptionFor.add(grantOptionFor532.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_privilegeList_in_revokePrivileges8840);
			privilegeList533=privilegeList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_privilegeList.add(privilegeList533.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1657:47: ( privilegeObject )?
			int alt162=2;
			int LA162_0 = input.LA(1);
			if ( (LA162_0==KW_ON) ) {
				alt162=1;
			}
			switch (alt162) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1657:47: privilegeObject
					{
					pushFollow(FOLLOW_privilegeObject_in_revokePrivileges8842);
					privilegeObject534=privilegeObject();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_privilegeObject.add(privilegeObject534.getTree());
					}
					break;

			}

			KW_FROM535=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_revokePrivileges8845); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FROM.add(KW_FROM535);

			pushFollow(FOLLOW_principalSpecification_in_revokePrivileges8847);
			principalSpecification536=principalSpecification();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_principalSpecification.add(principalSpecification536.getTree());
			// AST REWRITE
			// elements: grantOptionFor, privilegeObject, privilegeList, principalSpecification
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1658:5: -> ^( TOK_REVOKE privilegeList principalSpecification ( privilegeObject )? ( grantOptionFor )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1658:8: ^( TOK_REVOKE privilegeList principalSpecification ( privilegeObject )? ( grantOptionFor )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_REVOKE, "TOK_REVOKE"), root_1);
				adaptor.addChild(root_1, stream_privilegeList.nextTree());
				adaptor.addChild(root_1, stream_principalSpecification.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1658:58: ( privilegeObject )?
				if ( stream_privilegeObject.hasNext() ) {
					adaptor.addChild(root_1, stream_privilegeObject.nextTree());
				}
				stream_privilegeObject.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1658:75: ( grantOptionFor )?
				if ( stream_grantOptionFor.hasNext() ) {
					adaptor.addChild(root_1, stream_grantOptionFor.nextTree());
				}
				stream_grantOptionFor.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "revokePrivileges"


	public static class grantRole_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "grantRole"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1661:1: grantRole : KW_GRANT ( KW_ROLE )? identifier ( COMMA identifier )* KW_TO principalSpecification ( withAdminOption )? -> ^( TOK_GRANT_ROLE principalSpecification ( withAdminOption )? ( identifier )+ ) ;
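	// Illustrative input (role and principal names are placeholders):
	//   GRANT ROLE r1, r2 TO USER hive_user WITH ADMIN OPTION
	// KW_ROLE is optional; the long LA(2) membership test below consumes it only when
	// the token after it can itself begin an identifier, so forms without the ROLE
	// keyword (e.g., GRANT r1 TO ...) also parse.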
	public final HiveParser.grantRole_return grantRole() throws RecognitionException {
		HiveParser.grantRole_return retval = new HiveParser.grantRole_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_GRANT537=null;
		Token KW_ROLE538=null;
		Token COMMA540=null;
		Token KW_TO542=null;
		ParserRuleReturnScope identifier539 =null;
		ParserRuleReturnScope identifier541 =null;
		ParserRuleReturnScope principalSpecification543 =null;
		ParserRuleReturnScope withAdminOption544 =null;

		ASTNode KW_GRANT537_tree=null;
		ASTNode KW_ROLE538_tree=null;
		ASTNode COMMA540_tree=null;
		ASTNode KW_TO542_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleTokenStream stream_KW_ROLE=new RewriteRuleTokenStream(adaptor,"token KW_ROLE");
		RewriteRuleTokenStream stream_KW_TO=new RewriteRuleTokenStream(adaptor,"token KW_TO");
		RewriteRuleTokenStream stream_KW_GRANT=new RewriteRuleTokenStream(adaptor,"token KW_GRANT");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_withAdminOption=new RewriteRuleSubtreeStream(adaptor,"rule withAdminOption");
		RewriteRuleSubtreeStream stream_principalSpecification=new RewriteRuleSubtreeStream(adaptor,"rule principalSpecification");

		pushMsg("grant role", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1664:5: ( KW_GRANT ( KW_ROLE )? identifier ( COMMA identifier )* KW_TO principalSpecification ( withAdminOption )? -> ^( TOK_GRANT_ROLE principalSpecification ( withAdminOption )? ( identifier )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1664:7: KW_GRANT ( KW_ROLE )? identifier ( COMMA identifier )* KW_TO principalSpecification ( withAdminOption )?
			{
			KW_GRANT537=(Token)match(input,KW_GRANT,FOLLOW_KW_GRANT_in_grantRole8894); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_GRANT.add(KW_GRANT537);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1664:16: ( KW_ROLE )?
			int alt163=2;
			int LA163_0 = input.LA(1);
			if ( (LA163_0==KW_ROLE) ) {
				int LA163_1 = input.LA(2);
				if ( (LA163_1==Identifier||(LA163_1 >= KW_ABORT && LA163_1 <= KW_AFTER)||LA163_1==KW_ALLOC_FRACTION||LA163_1==KW_ANALYZE||LA163_1==KW_ARCHIVE||LA163_1==KW_ASC||(LA163_1 >= KW_AUTOCOMMIT && LA163_1 <= KW_BEFORE)||(LA163_1 >= KW_BUCKET && LA163_1 <= KW_BUCKETS)||(LA163_1 >= KW_CACHE && LA163_1 <= KW_CASCADE)||LA163_1==KW_CHANGE||(LA163_1 >= KW_CHECK && LA163_1 <= KW_COLLECTION)||(LA163_1 >= KW_COLUMNS && LA163_1 <= KW_COMMENT)||(LA163_1 >= KW_COMPACT && LA163_1 <= KW_CONCATENATE)||LA163_1==KW_CONTINUE||LA163_1==KW_DATA||LA163_1==KW_DATABASES||(LA163_1 >= KW_DATETIME && LA163_1 <= KW_DBPROPERTIES)||(LA163_1 >= KW_DEFAULT && LA163_1 <= KW_DEFINED)||(LA163_1 >= KW_DELIMITED && LA163_1 <= KW_DESC)||(LA163_1 >= KW_DETAIL && LA163_1 <= KW_DISABLE)||(LA163_1 >= KW_DISTRIBUTE && LA163_1 <= KW_DO)||LA163_1==KW_DOW||(LA163_1 >= KW_DUMP && LA163_1 <= KW_ELEM_TYPE)||LA163_1==KW_ENABLE||(LA163_1 >= KW_ENFORCED && LA163_1 <= KW_ESCAPED)||LA163_1==KW_EXCLUSIVE||(LA163_1 >= KW_EXPLAIN && LA163_1 <= KW_EXPRESSION)||(LA163_1 >= KW_FIELDS && LA163_1 <= KW_FIRST)||(LA163_1 >= KW_FORMAT && LA163_1 <= KW_FORMATTED)||LA163_1==KW_FUNCTIONS||(LA163_1 >= KW_HOUR && LA163_1 <= KW_IDXPROPERTIES)||(LA163_1 >= KW_INDEX && LA163_1 <= KW_INDEXES)||(LA163_1 >= KW_INPATH && LA163_1 <= KW_INPUTFORMAT)||(LA163_1 >= KW_ISOLATION && LA163_1 <= KW_JAR)||(LA163_1 >= KW_KEY && LA163_1 <= KW_LAST)||LA163_1==KW_LEVEL||(LA163_1 >= KW_LIMIT && LA163_1 <= KW_LOAD)||(LA163_1 >= KW_LOCATION && LA163_1 <= KW_LONG)||LA163_1==KW_MANAGEMENT||(LA163_1 >= KW_MAPJOIN && LA163_1 <= KW_MATERIALIZED)||LA163_1==KW_METADATA||(LA163_1 >= KW_MINUTE && LA163_1 <= KW_MONTH)||(LA163_1 >= KW_MOVE && LA163_1 <= KW_MSCK)||(LA163_1 >= KW_NORELY && LA163_1 <= KW_NOSCAN)||LA163_1==KW_NOVALIDATE||LA163_1==KW_NULLS||LA163_1==KW_OFFSET||(LA163_1 >= KW_OPERATOR && LA163_1 <= KW_OPTION)||(LA163_1 >= KW_OUTPUTDRIVER && LA163_1 <= KW_OUTPUTFORMAT)||(LA163_1 >= KW_OVERWRITE && LA163_1 <= KW_OWNER)||(LA163_1 >= KW_PARTITIONED && LA163_1 <= KW_PATH)||(LA163_1 >= KW_PLAN && LA163_1 <= KW_POOL)||LA163_1==KW_PRINCIPALS||(LA163_1 >= KW_PURGE && LA163_1 <= KW_QUERY_PARALLELISM)||LA163_1==KW_READ||(LA163_1 >= KW_REBUILD && LA163_1 <= KW_RECORDWRITER)||(LA163_1 >= KW_RELOAD && LA163_1 <= KW_RESTRICT)||LA163_1==KW_REWRITE||(LA163_1 >= KW_ROLE && LA163_1 <= KW_ROLES)||(LA163_1 >= KW_SCHEDULING_POLICY && LA163_1 <= KW_SECOND)||(LA163_1 >= KW_SEMI && LA163_1 <= KW_SERVER)||(LA163_1 >= KW_SETS && LA163_1 <= KW_SKEWED)||(LA163_1 >= KW_SNAPSHOT && LA163_1 <= KW_SSL)||(LA163_1 >= KW_STATISTICS && LA163_1 <= KW_SUMMARY)||LA163_1==KW_TABLES||(LA163_1 >= KW_TBLPROPERTIES && LA163_1 <= KW_TERMINATED)||LA163_1==KW_TINYINT||(LA163_1 >= KW_TOUCH && LA163_1 <= KW_TRANSACTIONS)||LA163_1==KW_UNARCHIVE||LA163_1==KW_UNDO||LA163_1==KW_UNIONTYPE||(LA163_1 >= KW_UNLOCK && LA163_1 <= KW_UNSIGNED)||(LA163_1 >= KW_URI && LA163_1 <= KW_USE)||(LA163_1 >= KW_UTC && LA163_1 <= KW_VALIDATE)||LA163_1==KW_VALUE_TYPE||(LA163_1 >= KW_VECTORIZATION && LA163_1 <= KW_WEEK)||LA163_1==KW_WHILE||(LA163_1 >= KW_WORK && LA163_1 <= KW_ZONE)||LA163_1==KW_BATCH||LA163_1==KW_DAYOFWEEK||LA163_1==KW_HOLD_DDLTIME||LA163_1==KW_IGNORE||LA163_1==KW_NO_DROP||LA163_1==KW_OFFLINE||LA163_1==KW_PROTECTION||LA163_1==KW_READONLY||LA163_1==KW_TIMESTAMPTZ) ) {
					alt163=1;
				}
			}
			switch (alt163) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1664:16: KW_ROLE
					{
					KW_ROLE538=(Token)match(input,KW_ROLE,FOLLOW_KW_ROLE_in_grantRole8896); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ROLE.add(KW_ROLE538);

					}
					break;

			}

			pushFollow(FOLLOW_identifier_in_grantRole8899);
			identifier539=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(identifier539.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1664:36: ( COMMA identifier )*
			loop164:
			while (true) {
				int alt164=2;
				int LA164_0 = input.LA(1);
				if ( (LA164_0==COMMA) ) {
					alt164=1;
				}

				switch (alt164) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1664:37: COMMA identifier
					{
					COMMA540=(Token)match(input,COMMA,FOLLOW_COMMA_in_grantRole8902); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA540);

					pushFollow(FOLLOW_identifier_in_grantRole8904);
					identifier541=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(identifier541.getTree());
					}
					break;

				default :
					break loop164;
				}
			}

			KW_TO542=(Token)match(input,KW_TO,FOLLOW_KW_TO_in_grantRole8908); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TO.add(KW_TO542);

			pushFollow(FOLLOW_principalSpecification_in_grantRole8910);
			principalSpecification543=principalSpecification();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_principalSpecification.add(principalSpecification543.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1664:85: ( withAdminOption )?
			int alt165=2;
			int LA165_0 = input.LA(1);
			if ( (LA165_0==KW_WITH) ) {
				alt165=1;
			}
			switch (alt165) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1664:85: withAdminOption
					{
					pushFollow(FOLLOW_withAdminOption_in_grantRole8912);
					withAdminOption544=withAdminOption();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_withAdminOption.add(withAdminOption544.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: principalSpecification, identifier, withAdminOption
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1665:5: -> ^( TOK_GRANT_ROLE principalSpecification ( withAdminOption )? ( identifier )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1665:8: ^( TOK_GRANT_ROLE principalSpecification ( withAdminOption )? ( identifier )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_GRANT_ROLE, "TOK_GRANT_ROLE"), root_1);
				adaptor.addChild(root_1, stream_principalSpecification.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1665:48: ( withAdminOption )?
				if ( stream_withAdminOption.hasNext() ) {
					adaptor.addChild(root_1, stream_withAdminOption.nextTree());
				}
				stream_withAdminOption.reset();

				if ( !(stream_identifier.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_identifier.hasNext() ) {
					adaptor.addChild(root_1, stream_identifier.nextTree());
				}
				stream_identifier.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "grantRole"


	public static class revokeRole_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "revokeRole"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1668:1: revokeRole : KW_REVOKE ( adminOptionFor )? ( KW_ROLE )? identifier ( COMMA identifier )* KW_FROM principalSpecification -> ^( TOK_REVOKE_ROLE principalSpecification ( adminOptionFor )? ( identifier )+ ) ;
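	// Illustrative input (placeholders): REVOKE ADMIN OPTION FOR ROLE r1 FROM USER hive_user;
	// adminOptionFor is only taken when ADMIN is followed by OPTION, and the optional ROLE
	// keyword uses the same LA(2) identifier-start test as grantRole.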
	public final HiveParser.revokeRole_return revokeRole() throws RecognitionException {
		HiveParser.revokeRole_return retval = new HiveParser.revokeRole_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_REVOKE545=null;
		Token KW_ROLE547=null;
		Token COMMA549=null;
		Token KW_FROM551=null;
		ParserRuleReturnScope adminOptionFor546 =null;
		ParserRuleReturnScope identifier548 =null;
		ParserRuleReturnScope identifier550 =null;
		ParserRuleReturnScope principalSpecification552 =null;

		ASTNode KW_REVOKE545_tree=null;
		ASTNode KW_ROLE547_tree=null;
		ASTNode COMMA549_tree=null;
		ASTNode KW_FROM551_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleTokenStream stream_KW_ROLE=new RewriteRuleTokenStream(adaptor,"token KW_ROLE");
		RewriteRuleTokenStream stream_KW_FROM=new RewriteRuleTokenStream(adaptor,"token KW_FROM");
		RewriteRuleTokenStream stream_KW_REVOKE=new RewriteRuleTokenStream(adaptor,"token KW_REVOKE");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_adminOptionFor=new RewriteRuleSubtreeStream(adaptor,"rule adminOptionFor");
		RewriteRuleSubtreeStream stream_principalSpecification=new RewriteRuleSubtreeStream(adaptor,"rule principalSpecification");

		pushMsg("revoke role", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1671:5: ( KW_REVOKE ( adminOptionFor )? ( KW_ROLE )? identifier ( COMMA identifier )* KW_FROM principalSpecification -> ^( TOK_REVOKE_ROLE principalSpecification ( adminOptionFor )? ( identifier )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1671:7: KW_REVOKE ( adminOptionFor )? ( KW_ROLE )? identifier ( COMMA identifier )* KW_FROM principalSpecification
			{
			KW_REVOKE545=(Token)match(input,KW_REVOKE,FOLLOW_KW_REVOKE_in_revokeRole8958); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_REVOKE.add(KW_REVOKE545);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1671:17: ( adminOptionFor )?
			int alt166=2;
			int LA166_0 = input.LA(1);
			if ( (LA166_0==KW_ADMIN) ) {
				int LA166_1 = input.LA(2);
				if ( (LA166_1==KW_OPTION) ) {
					alt166=1;
				}
			}
			switch (alt166) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1671:17: adminOptionFor
					{
					pushFollow(FOLLOW_adminOptionFor_in_revokeRole8960);
					adminOptionFor546=adminOptionFor();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_adminOptionFor.add(adminOptionFor546.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1671:33: ( KW_ROLE )?
			int alt167=2;
			int LA167_0 = input.LA(1);
			if ( (LA167_0==KW_ROLE) ) {
				int LA167_1 = input.LA(2);
				if ( (LA167_1==Identifier||(LA167_1 >= KW_ABORT && LA167_1 <= KW_AFTER)||LA167_1==KW_ALLOC_FRACTION||LA167_1==KW_ANALYZE||LA167_1==KW_ARCHIVE||LA167_1==KW_ASC||(LA167_1 >= KW_AUTOCOMMIT && LA167_1 <= KW_BEFORE)||(LA167_1 >= KW_BUCKET && LA167_1 <= KW_BUCKETS)||(LA167_1 >= KW_CACHE && LA167_1 <= KW_CASCADE)||LA167_1==KW_CHANGE||(LA167_1 >= KW_CHECK && LA167_1 <= KW_COLLECTION)||(LA167_1 >= KW_COLUMNS && LA167_1 <= KW_COMMENT)||(LA167_1 >= KW_COMPACT && LA167_1 <= KW_CONCATENATE)||LA167_1==KW_CONTINUE||LA167_1==KW_DATA||LA167_1==KW_DATABASES||(LA167_1 >= KW_DATETIME && LA167_1 <= KW_DBPROPERTIES)||(LA167_1 >= KW_DEFAULT && LA167_1 <= KW_DEFINED)||(LA167_1 >= KW_DELIMITED && LA167_1 <= KW_DESC)||(LA167_1 >= KW_DETAIL && LA167_1 <= KW_DISABLE)||(LA167_1 >= KW_DISTRIBUTE && LA167_1 <= KW_DO)||LA167_1==KW_DOW||(LA167_1 >= KW_DUMP && LA167_1 <= KW_ELEM_TYPE)||LA167_1==KW_ENABLE||(LA167_1 >= KW_ENFORCED && LA167_1 <= KW_ESCAPED)||LA167_1==KW_EXCLUSIVE||(LA167_1 >= KW_EXPLAIN && LA167_1 <= KW_EXPRESSION)||(LA167_1 >= KW_FIELDS && LA167_1 <= KW_FIRST)||(LA167_1 >= KW_FORMAT && LA167_1 <= KW_FORMATTED)||LA167_1==KW_FUNCTIONS||(LA167_1 >= KW_HOUR && LA167_1 <= KW_IDXPROPERTIES)||(LA167_1 >= KW_INDEX && LA167_1 <= KW_INDEXES)||(LA167_1 >= KW_INPATH && LA167_1 <= KW_INPUTFORMAT)||(LA167_1 >= KW_ISOLATION && LA167_1 <= KW_JAR)||(LA167_1 >= KW_KEY && LA167_1 <= KW_LAST)||LA167_1==KW_LEVEL||(LA167_1 >= KW_LIMIT && LA167_1 <= KW_LOAD)||(LA167_1 >= KW_LOCATION && LA167_1 <= KW_LONG)||LA167_1==KW_MANAGEMENT||(LA167_1 >= KW_MAPJOIN && LA167_1 <= KW_MATERIALIZED)||LA167_1==KW_METADATA||(LA167_1 >= KW_MINUTE && LA167_1 <= KW_MONTH)||(LA167_1 >= KW_MOVE && LA167_1 <= KW_MSCK)||(LA167_1 >= KW_NORELY && LA167_1 <= KW_NOSCAN)||LA167_1==KW_NOVALIDATE||LA167_1==KW_NULLS||LA167_1==KW_OFFSET||(LA167_1 >= KW_OPERATOR && LA167_1 <= KW_OPTION)||(LA167_1 >= KW_OUTPUTDRIVER && LA167_1 <= KW_OUTPUTFORMAT)||(LA167_1 >= KW_OVERWRITE && LA167_1 <= KW_OWNER)||(LA167_1 >= KW_PARTITIONED && LA167_1 <= KW_PATH)||(LA167_1 >= KW_PLAN && LA167_1 <= KW_POOL)||LA167_1==KW_PRINCIPALS||(LA167_1 >= KW_PURGE && LA167_1 <= KW_QUERY_PARALLELISM)||LA167_1==KW_READ||(LA167_1 >= KW_REBUILD && LA167_1 <= KW_RECORDWRITER)||(LA167_1 >= KW_RELOAD && LA167_1 <= KW_RESTRICT)||LA167_1==KW_REWRITE||(LA167_1 >= KW_ROLE && LA167_1 <= KW_ROLES)||(LA167_1 >= KW_SCHEDULING_POLICY && LA167_1 <= KW_SECOND)||(LA167_1 >= KW_SEMI && LA167_1 <= KW_SERVER)||(LA167_1 >= KW_SETS && LA167_1 <= KW_SKEWED)||(LA167_1 >= KW_SNAPSHOT && LA167_1 <= KW_SSL)||(LA167_1 >= KW_STATISTICS && LA167_1 <= KW_SUMMARY)||LA167_1==KW_TABLES||(LA167_1 >= KW_TBLPROPERTIES && LA167_1 <= KW_TERMINATED)||LA167_1==KW_TINYINT||(LA167_1 >= KW_TOUCH && LA167_1 <= KW_TRANSACTIONS)||LA167_1==KW_UNARCHIVE||LA167_1==KW_UNDO||LA167_1==KW_UNIONTYPE||(LA167_1 >= KW_UNLOCK && LA167_1 <= KW_UNSIGNED)||(LA167_1 >= KW_URI && LA167_1 <= KW_USE)||(LA167_1 >= KW_UTC && LA167_1 <= KW_VALIDATE)||LA167_1==KW_VALUE_TYPE||(LA167_1 >= KW_VECTORIZATION && LA167_1 <= KW_WEEK)||LA167_1==KW_WHILE||(LA167_1 >= KW_WORK && LA167_1 <= KW_ZONE)||LA167_1==KW_BATCH||LA167_1==KW_DAYOFWEEK||LA167_1==KW_HOLD_DDLTIME||LA167_1==KW_IGNORE||LA167_1==KW_NO_DROP||LA167_1==KW_OFFLINE||LA167_1==KW_PROTECTION||LA167_1==KW_READONLY||LA167_1==KW_TIMESTAMPTZ) ) {
					alt167=1;
				}
			}
			switch (alt167) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1671:33: KW_ROLE
					{
					KW_ROLE547=(Token)match(input,KW_ROLE,FOLLOW_KW_ROLE_in_revokeRole8963); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ROLE.add(KW_ROLE547);

					}
					break;

			}

			pushFollow(FOLLOW_identifier_in_revokeRole8966);
			identifier548=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(identifier548.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1671:53: ( COMMA identifier )*
			loop168:
			while (true) {
				int alt168=2;
				int LA168_0 = input.LA(1);
				if ( (LA168_0==COMMA) ) {
					alt168=1;
				}

				switch (alt168) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1671:54: COMMA identifier
					{
					COMMA549=(Token)match(input,COMMA,FOLLOW_COMMA_in_revokeRole8969); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA549);

					pushFollow(FOLLOW_identifier_in_revokeRole8971);
					identifier550=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(identifier550.getTree());
					}
					break;

				default :
					break loop168;
				}
			}

			KW_FROM551=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_revokeRole8975); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FROM.add(KW_FROM551);

			pushFollow(FOLLOW_principalSpecification_in_revokeRole8977);
			principalSpecification552=principalSpecification();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_principalSpecification.add(principalSpecification552.getTree());
			// AST REWRITE
			// elements: principalSpecification, identifier, adminOptionFor
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1672:5: -> ^( TOK_REVOKE_ROLE principalSpecification ( adminOptionFor )? ( identifier )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1672:8: ^( TOK_REVOKE_ROLE principalSpecification ( adminOptionFor )? ( identifier )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_REVOKE_ROLE, "TOK_REVOKE_ROLE"), root_1);
				adaptor.addChild(root_1, stream_principalSpecification.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1672:49: ( adminOptionFor )?
				if ( stream_adminOptionFor.hasNext() ) {
					adaptor.addChild(root_1, stream_adminOptionFor.nextTree());
				}
				stream_adminOptionFor.reset();

				if ( !(stream_identifier.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_identifier.hasNext() ) {
					adaptor.addChild(root_1, stream_identifier.nextTree());
				}
				stream_identifier.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "revokeRole"


	public static class showRoleGrants_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "showRoleGrants"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1675:1: showRoleGrants : KW_SHOW KW_ROLE KW_GRANT principalName -> ^( TOK_SHOW_ROLE_GRANT principalName ) ;
	public final HiveParser.showRoleGrants_return showRoleGrants() throws RecognitionException {
		HiveParser.showRoleGrants_return retval = new HiveParser.showRoleGrants_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SHOW553=null;
		Token KW_ROLE554=null;
		Token KW_GRANT555=null;
		ParserRuleReturnScope principalName556 =null;

		ASTNode KW_SHOW553_tree=null;
		ASTNode KW_ROLE554_tree=null;
		ASTNode KW_GRANT555_tree=null;
		RewriteRuleTokenStream stream_KW_ROLE=new RewriteRuleTokenStream(adaptor,"token KW_ROLE");
		RewriteRuleTokenStream stream_KW_GRANT=new RewriteRuleTokenStream(adaptor,"token KW_GRANT");
		RewriteRuleTokenStream stream_KW_SHOW=new RewriteRuleTokenStream(adaptor,"token KW_SHOW");
		RewriteRuleSubtreeStream stream_principalName=new RewriteRuleSubtreeStream(adaptor,"rule principalName");

		pushMsg("show role grants", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1678:5: ( KW_SHOW KW_ROLE KW_GRANT principalName -> ^( TOK_SHOW_ROLE_GRANT principalName ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1678:7: KW_SHOW KW_ROLE KW_GRANT principalName
			{
			KW_SHOW553=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showRoleGrants9022); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW553);

			KW_ROLE554=(Token)match(input,KW_ROLE,FOLLOW_KW_ROLE_in_showRoleGrants9024); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ROLE.add(KW_ROLE554);

			KW_GRANT555=(Token)match(input,KW_GRANT,FOLLOW_KW_GRANT_in_showRoleGrants9026); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_GRANT.add(KW_GRANT555);

			pushFollow(FOLLOW_principalName_in_showRoleGrants9028);
			principalName556=principalName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_principalName.add(principalName556.getTree());
			// AST REWRITE
			// elements: principalName
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1679:5: -> ^( TOK_SHOW_ROLE_GRANT principalName )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1679:8: ^( TOK_SHOW_ROLE_GRANT principalName )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOW_ROLE_GRANT, "TOK_SHOW_ROLE_GRANT"), root_1);
				adaptor.addChild(root_1, stream_principalName.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "showRoleGrants"


	public static class showRoles_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "showRoles"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1683:1: showRoles : KW_SHOW KW_ROLES -> ^( TOK_SHOW_ROLES ) ;
	public final HiveParser.showRoles_return showRoles() throws RecognitionException {
		HiveParser.showRoles_return retval = new HiveParser.showRoles_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SHOW557=null;
		Token KW_ROLES558=null;

		ASTNode KW_SHOW557_tree=null;
		ASTNode KW_ROLES558_tree=null;
		RewriteRuleTokenStream stream_KW_ROLES=new RewriteRuleTokenStream(adaptor,"token KW_ROLES");
		RewriteRuleTokenStream stream_KW_SHOW=new RewriteRuleTokenStream(adaptor,"token KW_SHOW");

		pushMsg("show roles", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1686:5: ( KW_SHOW KW_ROLES -> ^( TOK_SHOW_ROLES ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1686:7: KW_SHOW KW_ROLES
			{
			KW_SHOW557=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showRoles9068); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW557);

			KW_ROLES558=(Token)match(input,KW_ROLES,FOLLOW_KW_ROLES_in_showRoles9070); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ROLES.add(KW_ROLES558);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1687:5: -> ^( TOK_SHOW_ROLES )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1687:8: ^( TOK_SHOW_ROLES )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOW_ROLES, "TOK_SHOW_ROLES"), root_1);
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "showRoles"


	public static class showCurrentRole_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "showCurrentRole"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1690:1: showCurrentRole : KW_SHOW KW_CURRENT KW_ROLES -> ^( TOK_SHOW_SET_ROLE ) ;
	public final HiveParser.showCurrentRole_return showCurrentRole() throws RecognitionException {
		HiveParser.showCurrentRole_return retval = new HiveParser.showCurrentRole_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SHOW559=null;
		Token KW_CURRENT560=null;
		Token KW_ROLES561=null;

		ASTNode KW_SHOW559_tree=null;
		ASTNode KW_CURRENT560_tree=null;
		ASTNode KW_ROLES561_tree=null;
		RewriteRuleTokenStream stream_KW_ROLES=new RewriteRuleTokenStream(adaptor,"token KW_ROLES");
		RewriteRuleTokenStream stream_KW_SHOW=new RewriteRuleTokenStream(adaptor,"token KW_SHOW");
		RewriteRuleTokenStream stream_KW_CURRENT=new RewriteRuleTokenStream(adaptor,"token KW_CURRENT");

		pushMsg("show current role", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1693:5: ( KW_SHOW KW_CURRENT KW_ROLES -> ^( TOK_SHOW_SET_ROLE ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1693:7: KW_SHOW KW_CURRENT KW_ROLES
			{
			KW_SHOW559=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showCurrentRole9107); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW559);

			KW_CURRENT560=(Token)match(input,KW_CURRENT,FOLLOW_KW_CURRENT_in_showCurrentRole9109); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_CURRENT.add(KW_CURRENT560);

			KW_ROLES561=(Token)match(input,KW_ROLES,FOLLOW_KW_ROLES_in_showCurrentRole9111); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ROLES.add(KW_ROLES561);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1694:5: -> ^( TOK_SHOW_SET_ROLE )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1694:8: ^( TOK_SHOW_SET_ROLE )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOW_SET_ROLE, "TOK_SHOW_SET_ROLE"), root_1);
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "showCurrentRole"


	public static class setRole_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "setRole"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1697:1: setRole : KW_SET KW_ROLE ( ( KW_ALL )=> (all= KW_ALL ) -> ^( TOK_SHOW_SET_ROLE Identifier[$all.text] ) | ( KW_NONE )=> (none= KW_NONE ) -> ^( TOK_SHOW_SET_ROLE Identifier[$none.text] ) | identifier -> ^( TOK_SHOW_SET_ROLE identifier ) ) ;
	public final HiveParser.setRole_return setRole() throws RecognitionException {
		HiveParser.setRole_return retval = new HiveParser.setRole_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token all=null;
		Token none=null;
		Token KW_SET562=null;
		Token KW_ROLE563=null;
		ParserRuleReturnScope identifier564 =null;

		ASTNode all_tree=null;
		ASTNode none_tree=null;
		ASTNode KW_SET562_tree=null;
		ASTNode KW_ROLE563_tree=null;
		RewriteRuleTokenStream stream_KW_ROLE=new RewriteRuleTokenStream(adaptor,"token KW_ROLE");
		RewriteRuleTokenStream stream_KW_NONE=new RewriteRuleTokenStream(adaptor,"token KW_NONE");
		RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
		RewriteRuleTokenStream stream_KW_ALL=new RewriteRuleTokenStream(adaptor,"token KW_ALL");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");

		pushMsg("set role", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1700:5: ( KW_SET KW_ROLE ( ( KW_ALL )=> (all= KW_ALL ) -> ^( TOK_SHOW_SET_ROLE Identifier[$all.text] ) | ( KW_NONE )=> (none= KW_NONE ) -> ^( TOK_SHOW_SET_ROLE Identifier[$none.text] ) | identifier -> ^( TOK_SHOW_SET_ROLE identifier ) ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1700:7: KW_SET KW_ROLE ( ( KW_ALL )=> (all= KW_ALL ) -> ^( TOK_SHOW_SET_ROLE Identifier[$all.text] ) | ( KW_NONE )=> (none= KW_NONE ) -> ^( TOK_SHOW_SET_ROLE Identifier[$none.text] ) | identifier -> ^( TOK_SHOW_SET_ROLE identifier ) )
			{
			KW_SET562=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_setRole9148); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET562);

			KW_ROLE563=(Token)match(input,KW_ROLE,FOLLOW_KW_ROLE_in_setRole9150); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ROLE.add(KW_ROLE563);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1701:5: ( ( KW_ALL )=> (all= KW_ALL ) -> ^( TOK_SHOW_SET_ROLE Identifier[$all.text] ) | ( KW_NONE )=> (none= KW_NONE ) -> ^( TOK_SHOW_SET_ROLE Identifier[$none.text] ) | identifier -> ^( TOK_SHOW_SET_ROLE identifier ) )
			int alt169=3;
			int LA169_0 = input.LA(1);
			if ( (LA169_0==KW_ALL) && (synpred14_HiveParser())) {
				alt169=1;
			}
			else if ( (LA169_0==KW_NONE) && (synpred15_HiveParser())) {
				alt169=2;
			}
			else if ( (LA169_0==Identifier||(LA169_0 >= KW_ABORT && LA169_0 <= KW_AFTER)||LA169_0==KW_ALLOC_FRACTION||LA169_0==KW_ANALYZE||LA169_0==KW_ARCHIVE||LA169_0==KW_ASC||(LA169_0 >= KW_AUTOCOMMIT && LA169_0 <= KW_BEFORE)||(LA169_0 >= KW_BUCKET && LA169_0 <= KW_BUCKETS)||(LA169_0 >= KW_CACHE && LA169_0 <= KW_CASCADE)||LA169_0==KW_CHANGE||(LA169_0 >= KW_CHECK && LA169_0 <= KW_COLLECTION)||(LA169_0 >= KW_COLUMNS && LA169_0 <= KW_COMMENT)||(LA169_0 >= KW_COMPACT && LA169_0 <= KW_CONCATENATE)||LA169_0==KW_CONTINUE||LA169_0==KW_DATA||LA169_0==KW_DATABASES||(LA169_0 >= KW_DATETIME && LA169_0 <= KW_DBPROPERTIES)||(LA169_0 >= KW_DEFAULT && LA169_0 <= KW_DEFINED)||(LA169_0 >= KW_DELIMITED && LA169_0 <= KW_DESC)||(LA169_0 >= KW_DETAIL && LA169_0 <= KW_DISABLE)||(LA169_0 >= KW_DISTRIBUTE && LA169_0 <= KW_DO)||LA169_0==KW_DOW||(LA169_0 >= KW_DUMP && LA169_0 <= KW_ELEM_TYPE)||LA169_0==KW_ENABLE||(LA169_0 >= KW_ENFORCED && LA169_0 <= KW_ESCAPED)||LA169_0==KW_EXCLUSIVE||(LA169_0 >= KW_EXPLAIN && LA169_0 <= KW_EXPRESSION)||(LA169_0 >= KW_FIELDS && LA169_0 <= KW_FIRST)||(LA169_0 >= KW_FORMAT && LA169_0 <= KW_FORMATTED)||LA169_0==KW_FUNCTIONS||(LA169_0 >= KW_HOUR && LA169_0 <= KW_IDXPROPERTIES)||(LA169_0 >= KW_INDEX && LA169_0 <= KW_INDEXES)||(LA169_0 >= KW_INPATH && LA169_0 <= KW_INPUTFORMAT)||(LA169_0 >= KW_ISOLATION && LA169_0 <= KW_JAR)||(LA169_0 >= KW_KEY && LA169_0 <= KW_LAST)||LA169_0==KW_LEVEL||(LA169_0 >= KW_LIMIT && LA169_0 <= KW_LOAD)||(LA169_0 >= KW_LOCATION && LA169_0 <= KW_LONG)||LA169_0==KW_MANAGEMENT||(LA169_0 >= KW_MAPJOIN && LA169_0 <= KW_MATERIALIZED)||LA169_0==KW_METADATA||(LA169_0 >= KW_MINUTE && LA169_0 <= KW_MONTH)||(LA169_0 >= KW_MOVE && LA169_0 <= KW_MSCK)||(LA169_0 >= KW_NORELY && LA169_0 <= KW_NOSCAN)||LA169_0==KW_NOVALIDATE||LA169_0==KW_NULLS||LA169_0==KW_OFFSET||(LA169_0 >= KW_OPERATOR && LA169_0 <= KW_OPTION)||(LA169_0 >= KW_OUTPUTDRIVER && LA169_0 <= KW_OUTPUTFORMAT)||(LA169_0 >= KW_OVERWRITE && LA169_0 <= KW_OWNER)||(LA169_0 >= KW_PARTITIONED && LA169_0 <= KW_PATH)||(LA169_0 >= KW_PLAN && LA169_0 <= KW_POOL)||LA169_0==KW_PRINCIPALS||(LA169_0 >= KW_PURGE && LA169_0 <= KW_QUERY_PARALLELISM)||LA169_0==KW_READ||(LA169_0 >= KW_REBUILD && LA169_0 <= KW_RECORDWRITER)||(LA169_0 >= KW_RELOAD && LA169_0 <= KW_RESTRICT)||LA169_0==KW_REWRITE||(LA169_0 >= KW_ROLE && LA169_0 <= KW_ROLES)||(LA169_0 >= KW_SCHEDULING_POLICY && LA169_0 <= KW_SECOND)||(LA169_0 >= KW_SEMI && LA169_0 <= KW_SERVER)||(LA169_0 >= KW_SETS && LA169_0 <= KW_SKEWED)||(LA169_0 >= KW_SNAPSHOT && LA169_0 <= KW_SSL)||(LA169_0 >= KW_STATISTICS && LA169_0 <= KW_SUMMARY)||LA169_0==KW_TABLES||(LA169_0 >= KW_TBLPROPERTIES && LA169_0 <= KW_TERMINATED)||LA169_0==KW_TINYINT||(LA169_0 >= KW_TOUCH && LA169_0 <= KW_TRANSACTIONS)||LA169_0==KW_UNARCHIVE||LA169_0==KW_UNDO||LA169_0==KW_UNIONTYPE||(LA169_0 >= KW_UNLOCK && LA169_0 <= KW_UNSIGNED)||(LA169_0 >= KW_URI && LA169_0 <= KW_USE)||(LA169_0 >= KW_UTC && LA169_0 <= KW_VALIDATE)||LA169_0==KW_VALUE_TYPE||(LA169_0 >= KW_VECTORIZATION && LA169_0 <= KW_WEEK)||LA169_0==KW_WHILE||(LA169_0 >= KW_WORK && LA169_0 <= KW_ZONE)||LA169_0==KW_BATCH||LA169_0==KW_DAYOFWEEK||LA169_0==KW_HOLD_DDLTIME||LA169_0==KW_IGNORE||LA169_0==KW_NO_DROP||LA169_0==KW_OFFLINE||LA169_0==KW_PROTECTION||LA169_0==KW_READONLY||LA169_0==KW_TIMESTAMPTZ) ) {
				alt169=3;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 169, 0, input);
				throw nvae;
			}

			switch (alt169) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1702:5: ( KW_ALL )=> (all= KW_ALL )
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1702:17: (all= KW_ALL )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1702:18: all= KW_ALL
					{
					all=(Token)match(input,KW_ALL,FOLLOW_KW_ALL_in_setRole9172); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ALL.add(all);

					}

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1702:30: -> ^( TOK_SHOW_SET_ROLE Identifier[$all.text] )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1702:33: ^( TOK_SHOW_SET_ROLE Identifier[$all.text] )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOW_SET_ROLE, "TOK_SHOW_SET_ROLE"), root_1);
						adaptor.addChild(root_1, (ASTNode)adaptor.create(Identifier, (all!=null?all.getText():null)));
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1704:5: ( KW_NONE )=> (none= KW_NONE )
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1704:18: (none= KW_NONE )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1704:19: none= KW_NONE
					{
					none=(Token)match(input,KW_NONE,FOLLOW_KW_NONE_in_setRole9203); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_NONE.add(none);

					}

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1704:33: -> ^( TOK_SHOW_SET_ROLE Identifier[$none.text] )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1704:36: ^( TOK_SHOW_SET_ROLE Identifier[$none.text] )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOW_SET_ROLE, "TOK_SHOW_SET_ROLE"), root_1);
						adaptor.addChild(root_1, (ASTNode)adaptor.create(Identifier, (none!=null?none.getText():null)));
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1706:5: identifier
					{
					pushFollow(FOLLOW_identifier_in_setRole9225);
					identifier564=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(identifier564.getTree());
					// AST REWRITE
					// elements: identifier
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1706:16: -> ^( TOK_SHOW_SET_ROLE identifier )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1706:19: ^( TOK_SHOW_SET_ROLE identifier )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOW_SET_ROLE, "TOK_SHOW_SET_ROLE"), root_1);
						adaptor.addChild(root_1, stream_identifier.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "setRole"


	public static class showGrants_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "showGrants"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1710:1: showGrants : KW_SHOW KW_GRANT ( principalName )? ( KW_ON privilegeIncludeColObject )? -> ^( TOK_SHOW_GRANT ( principalName )? ( privilegeIncludeColObject )? ) ;
	public final HiveParser.showGrants_return showGrants() throws RecognitionException {
		HiveParser.showGrants_return retval = new HiveParser.showGrants_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SHOW565=null;
		Token KW_GRANT566=null;
		Token KW_ON568=null;
		ParserRuleReturnScope principalName567 =null;
		ParserRuleReturnScope privilegeIncludeColObject569 =null;

		ASTNode KW_SHOW565_tree=null;
		ASTNode KW_GRANT566_tree=null;
		ASTNode KW_ON568_tree=null;
		RewriteRuleTokenStream stream_KW_GRANT=new RewriteRuleTokenStream(adaptor,"token KW_GRANT");
		RewriteRuleTokenStream stream_KW_SHOW=new RewriteRuleTokenStream(adaptor,"token KW_SHOW");
		RewriteRuleTokenStream stream_KW_ON=new RewriteRuleTokenStream(adaptor,"token KW_ON");
		RewriteRuleSubtreeStream stream_privilegeIncludeColObject=new RewriteRuleSubtreeStream(adaptor,"rule privilegeIncludeColObject");
		RewriteRuleSubtreeStream stream_principalName=new RewriteRuleSubtreeStream(adaptor,"rule principalName");

		pushMsg("show grants", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1713:5: ( KW_SHOW KW_GRANT ( principalName )? ( KW_ON privilegeIncludeColObject )? -> ^( TOK_SHOW_GRANT ( principalName )? ( privilegeIncludeColObject )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1713:7: KW_SHOW KW_GRANT ( principalName )? ( KW_ON privilegeIncludeColObject )?
			{
			KW_SHOW565=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showGrants9266); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW565);

			KW_GRANT566=(Token)match(input,KW_GRANT,FOLLOW_KW_GRANT_in_showGrants9268); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_GRANT.add(KW_GRANT566);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1713:24: ( principalName )?
			int alt170=2;
			int LA170_0 = input.LA(1);
			if ( (LA170_0==KW_GROUP||LA170_0==KW_ROLE||LA170_0==KW_USER) ) {
				alt170=1;
			}
			switch (alt170) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1713:24: principalName
					{
					pushFollow(FOLLOW_principalName_in_showGrants9270);
					principalName567=principalName();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_principalName.add(principalName567.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1713:39: ( KW_ON privilegeIncludeColObject )?
			int alt171=2;
			int LA171_0 = input.LA(1);
			if ( (LA171_0==KW_ON) ) {
				alt171=1;
			}
			switch (alt171) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1713:40: KW_ON privilegeIncludeColObject
					{
					KW_ON568=(Token)match(input,KW_ON,FOLLOW_KW_ON_in_showGrants9274); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ON.add(KW_ON568);

					pushFollow(FOLLOW_privilegeIncludeColObject_in_showGrants9276);
					privilegeIncludeColObject569=privilegeIncludeColObject();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_privilegeIncludeColObject.add(privilegeIncludeColObject569.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: privilegeIncludeColObject, principalName
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1714:5: -> ^( TOK_SHOW_GRANT ( principalName )? ( privilegeIncludeColObject )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1714:8: ^( TOK_SHOW_GRANT ( principalName )? ( privilegeIncludeColObject )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOW_GRANT, "TOK_SHOW_GRANT"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1714:25: ( principalName )?
				if ( stream_principalName.hasNext() ) {
					adaptor.addChild(root_1, stream_principalName.nextTree());
				}
				stream_principalName.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1714:40: ( privilegeIncludeColObject )?
				if ( stream_privilegeIncludeColObject.hasNext() ) {
					adaptor.addChild(root_1, stream_privilegeIncludeColObject.nextTree());
				}
				stream_privilegeIncludeColObject.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "showGrants"


	public static class showRolePrincipals_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "showRolePrincipals"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1717:1: showRolePrincipals : KW_SHOW KW_PRINCIPALS roleName= identifier -> ^( TOK_SHOW_ROLE_PRINCIPALS $roleName) ;
	public final HiveParser.showRolePrincipals_return showRolePrincipals() throws RecognitionException {
		HiveParser.showRolePrincipals_return retval = new HiveParser.showRolePrincipals_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SHOW570=null;
		Token KW_PRINCIPALS571=null;
		ParserRuleReturnScope roleName =null;

		ASTNode KW_SHOW570_tree=null;
		ASTNode KW_PRINCIPALS571_tree=null;
		RewriteRuleTokenStream stream_KW_PRINCIPALS=new RewriteRuleTokenStream(adaptor,"token KW_PRINCIPALS");
		RewriteRuleTokenStream stream_KW_SHOW=new RewriteRuleTokenStream(adaptor,"token KW_SHOW");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");

		pushMsg("show role principals", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1720:5: ( KW_SHOW KW_PRINCIPALS roleName= identifier -> ^( TOK_SHOW_ROLE_PRINCIPALS $roleName) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1720:7: KW_SHOW KW_PRINCIPALS roleName= identifier
			{
			KW_SHOW570=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showRolePrincipals9321); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SHOW.add(KW_SHOW570);

			KW_PRINCIPALS571=(Token)match(input,KW_PRINCIPALS,FOLLOW_KW_PRINCIPALS_in_showRolePrincipals9323); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_PRINCIPALS.add(KW_PRINCIPALS571);

			pushFollow(FOLLOW_identifier_in_showRolePrincipals9327);
			roleName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(roleName.getTree());
			// AST REWRITE
			// elements: roleName
			// token labels: 
			// rule labels: roleName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_roleName=new RewriteRuleSubtreeStream(adaptor,"rule roleName",roleName!=null?roleName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1721:5: -> ^( TOK_SHOW_ROLE_PRINCIPALS $roleName)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1721:8: ^( TOK_SHOW_ROLE_PRINCIPALS $roleName)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SHOW_ROLE_PRINCIPALS, "TOK_SHOW_ROLE_PRINCIPALS"), root_1);
				adaptor.addChild(root_1, stream_roleName.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "showRolePrincipals"


	public static class privilegeIncludeColObject_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "privilegeIncludeColObject"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1725:1: privilegeIncludeColObject : ( ( KW_ALL )=> KW_ALL -> ^( TOK_RESOURCE_ALL ) | privObjectCols -> ^( TOK_PRIV_OBJECT_COL privObjectCols ) );
	public final HiveParser.privilegeIncludeColObject_return privilegeIncludeColObject() throws RecognitionException {
		HiveParser.privilegeIncludeColObject_return retval = new HiveParser.privilegeIncludeColObject_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ALL572=null;
		ParserRuleReturnScope privObjectCols573 =null;

		ASTNode KW_ALL572_tree=null;
		RewriteRuleTokenStream stream_KW_ALL=new RewriteRuleTokenStream(adaptor,"token KW_ALL");
		RewriteRuleSubtreeStream stream_privObjectCols=new RewriteRuleSubtreeStream(adaptor,"rule privObjectCols");

		pushMsg("privilege object including columns", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1728:5: ( ( KW_ALL )=> KW_ALL -> ^( TOK_RESOURCE_ALL ) | privObjectCols -> ^( TOK_PRIV_OBJECT_COL privObjectCols ) )
			int alt172=2;
			int LA172_0 = input.LA(1);
			if ( (LA172_0==KW_ALL) && (synpred16_HiveParser())) {
				alt172=1;
			}
			else if ( (LA172_0==Identifier||(LA172_0 >= KW_ABORT && LA172_0 <= KW_AFTER)||LA172_0==KW_ALLOC_FRACTION||LA172_0==KW_ANALYZE||LA172_0==KW_ARCHIVE||LA172_0==KW_ASC||(LA172_0 >= KW_AUTOCOMMIT && LA172_0 <= KW_BEFORE)||(LA172_0 >= KW_BUCKET && LA172_0 <= KW_BUCKETS)||(LA172_0 >= KW_CACHE && LA172_0 <= KW_CASCADE)||LA172_0==KW_CHANGE||(LA172_0 >= KW_CHECK && LA172_0 <= KW_COLLECTION)||(LA172_0 >= KW_COLUMNS && LA172_0 <= KW_COMMENT)||(LA172_0 >= KW_COMPACT && LA172_0 <= KW_CONCATENATE)||LA172_0==KW_CONTINUE||(LA172_0 >= KW_DATA && LA172_0 <= KW_DATABASES)||(LA172_0 >= KW_DATETIME && LA172_0 <= KW_DBPROPERTIES)||(LA172_0 >= KW_DEFAULT && LA172_0 <= KW_DEFINED)||(LA172_0 >= KW_DELIMITED && LA172_0 <= KW_DESC)||(LA172_0 >= KW_DETAIL && LA172_0 <= KW_DISABLE)||(LA172_0 >= KW_DISTRIBUTE && LA172_0 <= KW_DO)||LA172_0==KW_DOW||(LA172_0 >= KW_DUMP && LA172_0 <= KW_ELEM_TYPE)||LA172_0==KW_ENABLE||(LA172_0 >= KW_ENFORCED && LA172_0 <= KW_ESCAPED)||LA172_0==KW_EXCLUSIVE||(LA172_0 >= KW_EXPLAIN && LA172_0 <= KW_EXPRESSION)||(LA172_0 >= KW_FIELDS && LA172_0 <= KW_FIRST)||(LA172_0 >= KW_FORMAT && LA172_0 <= KW_FORMATTED)||LA172_0==KW_FUNCTIONS||(LA172_0 >= KW_HOUR && LA172_0 <= KW_IDXPROPERTIES)||(LA172_0 >= KW_INDEX && LA172_0 <= KW_INDEXES)||(LA172_0 >= KW_INPATH && LA172_0 <= KW_INPUTFORMAT)||(LA172_0 >= KW_ISOLATION && LA172_0 <= KW_JAR)||(LA172_0 >= KW_KEY && LA172_0 <= KW_LAST)||LA172_0==KW_LEVEL||(LA172_0 >= KW_LIMIT && LA172_0 <= KW_LOAD)||(LA172_0 >= KW_LOCATION && LA172_0 <= KW_LONG)||LA172_0==KW_MANAGEMENT||(LA172_0 >= KW_MAPJOIN && LA172_0 <= KW_MATERIALIZED)||LA172_0==KW_METADATA||(LA172_0 >= KW_MINUTE && LA172_0 <= KW_MONTH)||(LA172_0 >= KW_MOVE && LA172_0 <= KW_MSCK)||(LA172_0 >= KW_NORELY && LA172_0 <= KW_NOSCAN)||LA172_0==KW_NOVALIDATE||LA172_0==KW_NULLS||LA172_0==KW_OFFSET||(LA172_0 >= KW_OPERATOR && LA172_0 <= KW_OPTION)||(LA172_0 >= KW_OUTPUTDRIVER && LA172_0 <= KW_OUTPUTFORMAT)||(LA172_0 >= KW_OVERWRITE && LA172_0 <= KW_OWNER)||(LA172_0 >= KW_PARTITIONED && LA172_0 <= KW_PATH)||(LA172_0 >= KW_PLAN && LA172_0 <= KW_POOL)||LA172_0==KW_PRINCIPALS||(LA172_0 >= KW_PURGE && LA172_0 <= KW_QUERY_PARALLELISM)||LA172_0==KW_READ||(LA172_0 >= KW_REBUILD && LA172_0 <= KW_RECORDWRITER)||(LA172_0 >= KW_RELOAD && LA172_0 <= KW_RESTRICT)||LA172_0==KW_REWRITE||(LA172_0 >= KW_ROLE && LA172_0 <= KW_ROLES)||(LA172_0 >= KW_SCHEDULING_POLICY && LA172_0 <= KW_SECOND)||(LA172_0 >= KW_SEMI && LA172_0 <= KW_SERVER)||(LA172_0 >= KW_SETS && LA172_0 <= KW_SKEWED)||(LA172_0 >= KW_SNAPSHOT && LA172_0 <= KW_SSL)||(LA172_0 >= KW_STATISTICS && LA172_0 <= KW_SUMMARY)||(LA172_0 >= KW_TABLE && LA172_0 <= KW_TABLES)||(LA172_0 >= KW_TBLPROPERTIES && LA172_0 <= KW_TERMINATED)||LA172_0==KW_TINYINT||(LA172_0 >= KW_TOUCH && LA172_0 <= KW_TRANSACTIONS)||LA172_0==KW_UNARCHIVE||LA172_0==KW_UNDO||LA172_0==KW_UNIONTYPE||(LA172_0 >= KW_UNLOCK && LA172_0 <= KW_UNSIGNED)||(LA172_0 >= KW_URI && LA172_0 <= KW_USE)||(LA172_0 >= KW_UTC && LA172_0 <= KW_VALIDATE)||LA172_0==KW_VALUE_TYPE||(LA172_0 >= KW_VECTORIZATION && LA172_0 <= KW_WEEK)||LA172_0==KW_WHILE||(LA172_0 >= KW_WORK && LA172_0 <= KW_ZONE)||LA172_0==KW_BATCH||LA172_0==KW_DAYOFWEEK||LA172_0==KW_HOLD_DDLTIME||LA172_0==KW_IGNORE||LA172_0==KW_NO_DROP||LA172_0==KW_OFFLINE||LA172_0==KW_PROTECTION||LA172_0==KW_READONLY||LA172_0==KW_TIMESTAMPTZ) ) {
				alt172=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 172, 0, input);
				throw nvae;
			}

			switch (alt172) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1728:7: ( KW_ALL )=> KW_ALL
					{
					KW_ALL572=(Token)match(input,KW_ALL,FOLLOW_KW_ALL_in_privilegeIncludeColObject9374); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ALL.add(KW_ALL572);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1728:26: -> ^( TOK_RESOURCE_ALL )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1728:29: ^( TOK_RESOURCE_ALL )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_RESOURCE_ALL, "TOK_RESOURCE_ALL"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1729:7: privObjectCols
					{
					pushFollow(FOLLOW_privObjectCols_in_privilegeIncludeColObject9388);
					privObjectCols573=privObjectCols();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_privObjectCols.add(privObjectCols573.getTree());
					// AST REWRITE
					// elements: privObjectCols
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1729:22: -> ^( TOK_PRIV_OBJECT_COL privObjectCols )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1729:25: ^( TOK_PRIV_OBJECT_COL privObjectCols )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_PRIV_OBJECT_COL, "TOK_PRIV_OBJECT_COL"), root_1);
						adaptor.addChild(root_1, stream_privObjectCols.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "privilegeIncludeColObject"


	public static class privilegeObject_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "privilegeObject"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1732:1: privilegeObject : KW_ON privObject -> ^( TOK_PRIV_OBJECT privObject ) ;
	public final HiveParser.privilegeObject_return privilegeObject() throws RecognitionException {
		HiveParser.privilegeObject_return retval = new HiveParser.privilegeObject_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ON574=null;
		ParserRuleReturnScope privObject575 =null;

		ASTNode KW_ON574_tree=null;
		RewriteRuleTokenStream stream_KW_ON=new RewriteRuleTokenStream(adaptor,"token KW_ON");
		RewriteRuleSubtreeStream stream_privObject=new RewriteRuleSubtreeStream(adaptor,"rule privObject");

		pushMsg("privilege object", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1735:5: ( KW_ON privObject -> ^( TOK_PRIV_OBJECT privObject ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1735:7: KW_ON privObject
			{
			KW_ON574=(Token)match(input,KW_ON,FOLLOW_KW_ON_in_privilegeObject9423); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ON.add(KW_ON574);

			pushFollow(FOLLOW_privObject_in_privilegeObject9425);
			privObject575=privObject();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_privObject.add(privObject575.getTree());
			// AST REWRITE
			// elements: privObject
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1735:24: -> ^( TOK_PRIV_OBJECT privObject )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1735:27: ^( TOK_PRIV_OBJECT privObject )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_PRIV_OBJECT, "TOK_PRIV_OBJECT"), root_1);
				adaptor.addChild(root_1, stream_privObject.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "privilegeObject"


	public static class privObject_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "privObject"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1739:1: privObject : ( ( KW_DATABASE | KW_SCHEMA ) identifier -> ^( TOK_DB_TYPE identifier ) | ( KW_TABLE )? tableName ( partitionSpec )? -> ^( TOK_TABLE_TYPE tableName ( partitionSpec )? ) | KW_URI (path= StringLiteral ) -> ^( TOK_URI_TYPE $path) | KW_SERVER identifier -> ^( TOK_SERVER_TYPE identifier ) );
	public final HiveParser.privObject_return privObject() throws RecognitionException {
		HiveParser.privObject_return retval = new HiveParser.privObject_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token path=null;
		Token KW_DATABASE576=null;
		Token KW_SCHEMA577=null;
		Token KW_TABLE579=null;
		Token KW_URI582=null;
		Token KW_SERVER583=null;
		ParserRuleReturnScope identifier578 =null;
		ParserRuleReturnScope tableName580 =null;
		ParserRuleReturnScope partitionSpec581 =null;
		ParserRuleReturnScope identifier584 =null;

		ASTNode path_tree=null;
		ASTNode KW_DATABASE576_tree=null;
		ASTNode KW_SCHEMA577_tree=null;
		ASTNode KW_TABLE579_tree=null;
		ASTNode KW_URI582_tree=null;
		ASTNode KW_SERVER583_tree=null;
		RewriteRuleTokenStream stream_KW_SERVER=new RewriteRuleTokenStream(adaptor,"token KW_SERVER");
		RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
		RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_URI=new RewriteRuleTokenStream(adaptor,"token KW_URI");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1740:5: ( ( KW_DATABASE | KW_SCHEMA ) identifier -> ^( TOK_DB_TYPE identifier ) | ( KW_TABLE )? tableName ( partitionSpec )? -> ^( TOK_TABLE_TYPE tableName ( partitionSpec )? ) | KW_URI (path= StringLiteral ) -> ^( TOK_URI_TYPE $path) | KW_SERVER identifier -> ^( TOK_SERVER_TYPE identifier ) )
			int alt176=4;
			switch ( input.LA(1) ) {
			case KW_DATABASE:
				{
				alt176=1;
				}
				break;
			case KW_SCHEMA:
				{
				int LA176_2 = input.LA(2);
				if ( (LA176_2==Identifier||(LA176_2 >= KW_ABORT && LA176_2 <= KW_AFTER)||LA176_2==KW_ALLOC_FRACTION||LA176_2==KW_ANALYZE||LA176_2==KW_ARCHIVE||LA176_2==KW_ASC||(LA176_2 >= KW_AUTOCOMMIT && LA176_2 <= KW_BEFORE)||(LA176_2 >= KW_BUCKET && LA176_2 <= KW_BUCKETS)||(LA176_2 >= KW_CACHE && LA176_2 <= KW_CASCADE)||LA176_2==KW_CHANGE||(LA176_2 >= KW_CHECK && LA176_2 <= KW_COLLECTION)||(LA176_2 >= KW_COLUMNS && LA176_2 <= KW_COMMENT)||(LA176_2 >= KW_COMPACT && LA176_2 <= KW_CONCATENATE)||LA176_2==KW_CONTINUE||LA176_2==KW_DATA||LA176_2==KW_DATABASES||(LA176_2 >= KW_DATETIME && LA176_2 <= KW_DBPROPERTIES)||(LA176_2 >= KW_DEFAULT && LA176_2 <= KW_DEFINED)||(LA176_2 >= KW_DELIMITED && LA176_2 <= KW_DESC)||(LA176_2 >= KW_DETAIL && LA176_2 <= KW_DISABLE)||(LA176_2 >= KW_DISTRIBUTE && LA176_2 <= KW_DO)||LA176_2==KW_DOW||(LA176_2 >= KW_DUMP && LA176_2 <= KW_ELEM_TYPE)||LA176_2==KW_ENABLE||(LA176_2 >= KW_ENFORCED && LA176_2 <= KW_ESCAPED)||LA176_2==KW_EXCLUSIVE||(LA176_2 >= KW_EXPLAIN && LA176_2 <= KW_EXPRESSION)||(LA176_2 >= KW_FIELDS && LA176_2 <= KW_FIRST)||(LA176_2 >= KW_FORMAT && LA176_2 <= KW_FORMATTED)||LA176_2==KW_FUNCTIONS||(LA176_2 >= KW_HOUR && LA176_2 <= KW_IDXPROPERTIES)||(LA176_2 >= KW_INDEX && LA176_2 <= KW_INDEXES)||(LA176_2 >= KW_INPATH && LA176_2 <= KW_INPUTFORMAT)||(LA176_2 >= KW_ISOLATION && LA176_2 <= KW_JAR)||(LA176_2 >= KW_KEY && LA176_2 <= KW_LAST)||LA176_2==KW_LEVEL||(LA176_2 >= KW_LIMIT && LA176_2 <= KW_LOAD)||(LA176_2 >= KW_LOCATION && LA176_2 <= KW_LONG)||LA176_2==KW_MANAGEMENT||(LA176_2 >= KW_MAPJOIN && LA176_2 <= KW_MATERIALIZED)||LA176_2==KW_METADATA||(LA176_2 >= KW_MINUTE && LA176_2 <= KW_MONTH)||(LA176_2 >= KW_MOVE && LA176_2 <= KW_MSCK)||(LA176_2 >= KW_NORELY && LA176_2 <= KW_NOSCAN)||LA176_2==KW_NOVALIDATE||LA176_2==KW_NULLS||LA176_2==KW_OFFSET||(LA176_2 >= KW_OPERATOR && LA176_2 <= KW_OPTION)||(LA176_2 >= KW_OUTPUTDRIVER && LA176_2 <= KW_OUTPUTFORMAT)||(LA176_2 >= KW_OVERWRITE && LA176_2 <= KW_OWNER)||(LA176_2 >= KW_PARTITIONED && LA176_2 <= KW_PATH)||(LA176_2 >= KW_PLAN && LA176_2 <= KW_POOL)||LA176_2==KW_PRINCIPALS||(LA176_2 >= KW_PURGE && LA176_2 <= KW_QUERY_PARALLELISM)||LA176_2==KW_READ||(LA176_2 >= KW_REBUILD && LA176_2 <= KW_RECORDWRITER)||(LA176_2 >= KW_RELOAD && LA176_2 <= KW_RESTRICT)||LA176_2==KW_REWRITE||(LA176_2 >= KW_ROLE && LA176_2 <= KW_ROLES)||(LA176_2 >= KW_SCHEDULING_POLICY && LA176_2 <= KW_SECOND)||(LA176_2 >= KW_SEMI && LA176_2 <= KW_SERVER)||(LA176_2 >= KW_SETS && LA176_2 <= KW_SKEWED)||(LA176_2 >= KW_SNAPSHOT && LA176_2 <= KW_SSL)||(LA176_2 >= KW_STATISTICS && LA176_2 <= KW_SUMMARY)||LA176_2==KW_TABLES||(LA176_2 >= KW_TBLPROPERTIES && LA176_2 <= KW_TERMINATED)||LA176_2==KW_TINYINT||(LA176_2 >= KW_TOUCH && LA176_2 <= KW_TRANSACTIONS)||LA176_2==KW_UNARCHIVE||LA176_2==KW_UNDO||LA176_2==KW_UNIONTYPE||(LA176_2 >= KW_UNLOCK && LA176_2 <= KW_UNSIGNED)||(LA176_2 >= KW_URI && LA176_2 <= KW_USE)||(LA176_2 >= KW_UTC && LA176_2 <= KW_VALIDATE)||LA176_2==KW_VALUE_TYPE||(LA176_2 >= KW_VECTORIZATION && LA176_2 <= KW_WEEK)||LA176_2==KW_WHILE||(LA176_2 >= KW_WORK && LA176_2 <= KW_ZONE)||LA176_2==KW_BATCH||LA176_2==KW_DAYOFWEEK||LA176_2==KW_HOLD_DDLTIME||LA176_2==KW_IGNORE||LA176_2==KW_NO_DROP||LA176_2==KW_OFFLINE||LA176_2==KW_PROTECTION||LA176_2==KW_READONLY||LA176_2==KW_TIMESTAMPTZ) ) {
					alt176=1;
				}
				else if ( (LA176_2==DOT||LA176_2==KW_FROM||LA176_2==KW_PARTITION||LA176_2==KW_TO) ) {
					alt176=2;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 176, 2, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case Identifier:
			case KW_ABORT:
			case KW_ACTIVATE:
			case KW_ACTIVE:
			case KW_ADD:
			case KW_ADMIN:
			case KW_AFTER:
			case KW_ALLOC_FRACTION:
			case KW_ANALYZE:
			case KW_ARCHIVE:
			case KW_ASC:
			case KW_AUTOCOMMIT:
			case KW_BEFORE:
			case KW_BUCKET:
			case KW_BUCKETS:
			case KW_CACHE:
			case KW_CASCADE:
			case KW_CHANGE:
			case KW_CHECK:
			case KW_CLUSTER:
			case KW_CLUSTERED:
			case KW_CLUSTERSTATUS:
			case KW_COLLECTION:
			case KW_COLUMNS:
			case KW_COMMENT:
			case KW_COMPACT:
			case KW_COMPACTIONS:
			case KW_COMPUTE:
			case KW_CONCATENATE:
			case KW_CONTINUE:
			case KW_DATA:
			case KW_DATABASES:
			case KW_DATETIME:
			case KW_DAY:
			case KW_DBPROPERTIES:
			case KW_DEFAULT:
			case KW_DEFERRED:
			case KW_DEFINED:
			case KW_DELIMITED:
			case KW_DEPENDENCY:
			case KW_DESC:
			case KW_DETAIL:
			case KW_DIRECTORIES:
			case KW_DIRECTORY:
			case KW_DISABLE:
			case KW_DISTRIBUTE:
			case KW_DO:
			case KW_DOW:
			case KW_DUMP:
			case KW_ELEM_TYPE:
			case KW_ENABLE:
			case KW_ENFORCED:
			case KW_ESCAPED:
			case KW_EXCLUSIVE:
			case KW_EXPLAIN:
			case KW_EXPORT:
			case KW_EXPRESSION:
			case KW_FIELDS:
			case KW_FILE:
			case KW_FILEFORMAT:
			case KW_FIRST:
			case KW_FORMAT:
			case KW_FORMATTED:
			case KW_FUNCTIONS:
			case KW_HOUR:
			case KW_IDXPROPERTIES:
			case KW_INDEX:
			case KW_INDEXES:
			case KW_INPATH:
			case KW_INPUTDRIVER:
			case KW_INPUTFORMAT:
			case KW_ISOLATION:
			case KW_ITEMS:
			case KW_JAR:
			case KW_KEY:
			case KW_KEYS:
			case KW_KEY_TYPE:
			case KW_KILL:
			case KW_LAST:
			case KW_LEVEL:
			case KW_LIMIT:
			case KW_LINES:
			case KW_LOAD:
			case KW_LOCATION:
			case KW_LOCK:
			case KW_LOCKS:
			case KW_LOGICAL:
			case KW_LONG:
			case KW_MANAGEMENT:
			case KW_MAPJOIN:
			case KW_MAPPING:
			case KW_MATCHED:
			case KW_MATERIALIZED:
			case KW_METADATA:
			case KW_MINUTE:
			case KW_MONTH:
			case KW_MOVE:
			case KW_MSCK:
			case KW_NORELY:
			case KW_NOSCAN:
			case KW_NOVALIDATE:
			case KW_NULLS:
			case KW_OFFSET:
			case KW_OPERATOR:
			case KW_OPTION:
			case KW_OUTPUTDRIVER:
			case KW_OUTPUTFORMAT:
			case KW_OVERWRITE:
			case KW_OWNER:
			case KW_PARTITIONED:
			case KW_PARTITIONS:
			case KW_PATH:
			case KW_PLAN:
			case KW_PLANS:
			case KW_PLUS:
			case KW_POOL:
			case KW_PRINCIPALS:
			case KW_PURGE:
			case KW_QUARTER:
			case KW_QUERY:
			case KW_QUERY_PARALLELISM:
			case KW_READ:
			case KW_REBUILD:
			case KW_RECORDREADER:
			case KW_RECORDWRITER:
			case KW_RELOAD:
			case KW_RELY:
			case KW_RENAME:
			case KW_REOPTIMIZATION:
			case KW_REPAIR:
			case KW_REPL:
			case KW_REPLACE:
			case KW_REPLICATION:
			case KW_RESOURCE:
			case KW_RESTRICT:
			case KW_REWRITE:
			case KW_ROLE:
			case KW_ROLES:
			case KW_SCHEDULING_POLICY:
			case KW_SCHEMAS:
			case KW_SECOND:
			case KW_SEMI:
			case KW_SERDE:
			case KW_SERDEPROPERTIES:
			case KW_SETS:
			case KW_SHARED:
			case KW_SHOW:
			case KW_SHOW_DATABASE:
			case KW_SKEWED:
			case KW_SNAPSHOT:
			case KW_SORT:
			case KW_SORTED:
			case KW_SSL:
			case KW_STATISTICS:
			case KW_STATUS:
			case KW_STORED:
			case KW_STREAMTABLE:
			case KW_STRING:
			case KW_STRUCT:
			case KW_SUMMARY:
			case KW_TABLE:
			case KW_TABLES:
			case KW_TBLPROPERTIES:
			case KW_TEMPORARY:
			case KW_TERMINATED:
			case KW_TINYINT:
			case KW_TOUCH:
			case KW_TRANSACTION:
			case KW_TRANSACTIONS:
			case KW_UNARCHIVE:
			case KW_UNDO:
			case KW_UNIONTYPE:
			case KW_UNLOCK:
			case KW_UNMANAGED:
			case KW_UNSET:
			case KW_UNSIGNED:
			case KW_USE:
			case KW_UTC:
			case KW_UTCTIMESTAMP:
			case KW_VALIDATE:
			case KW_VALUE_TYPE:
			case KW_VECTORIZATION:
			case KW_VIEW:
			case KW_VIEWS:
			case KW_WAIT:
			case KW_WEEK:
			case KW_WHILE:
			case KW_WORK:
			case KW_WORKLOAD:
			case KW_WRITE:
			case KW_YEAR:
			case KW_ZONE:
			case KW_BATCH:
			case KW_DAYOFWEEK:
			case KW_HOLD_DDLTIME:
			case KW_IGNORE:
			case KW_NO_DROP:
			case KW_OFFLINE:
			case KW_PROTECTION:
			case KW_READONLY:
			case KW_TIMESTAMPTZ:
				{
				alt176=2;
				}
				break;
			case KW_URI:
				{
				int LA176_5 = input.LA(2);
				if ( (LA176_5==DOT||LA176_5==KW_FROM||LA176_5==KW_PARTITION||LA176_5==KW_TO) ) {
					alt176=2;
				}
				else if ( (LA176_5==StringLiteral) ) {
					alt176=3;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 176, 5, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_SERVER:
				{
				int LA176_6 = input.LA(2);
				if ( (LA176_6==DOT||LA176_6==KW_FROM||LA176_6==KW_PARTITION||LA176_6==KW_TO) ) {
					alt176=2;
				}
				else if ( (LA176_6==Identifier||(LA176_6 >= KW_ABORT && LA176_6 <= KW_AFTER)||LA176_6==KW_ALLOC_FRACTION||LA176_6==KW_ANALYZE||LA176_6==KW_ARCHIVE||LA176_6==KW_ASC||(LA176_6 >= KW_AUTOCOMMIT && LA176_6 <= KW_BEFORE)||(LA176_6 >= KW_BUCKET && LA176_6 <= KW_BUCKETS)||(LA176_6 >= KW_CACHE && LA176_6 <= KW_CASCADE)||LA176_6==KW_CHANGE||(LA176_6 >= KW_CHECK && LA176_6 <= KW_COLLECTION)||(LA176_6 >= KW_COLUMNS && LA176_6 <= KW_COMMENT)||(LA176_6 >= KW_COMPACT && LA176_6 <= KW_CONCATENATE)||LA176_6==KW_CONTINUE||LA176_6==KW_DATA||LA176_6==KW_DATABASES||(LA176_6 >= KW_DATETIME && LA176_6 <= KW_DBPROPERTIES)||(LA176_6 >= KW_DEFAULT && LA176_6 <= KW_DEFINED)||(LA176_6 >= KW_DELIMITED && LA176_6 <= KW_DESC)||(LA176_6 >= KW_DETAIL && LA176_6 <= KW_DISABLE)||(LA176_6 >= KW_DISTRIBUTE && LA176_6 <= KW_DO)||LA176_6==KW_DOW||(LA176_6 >= KW_DUMP && LA176_6 <= KW_ELEM_TYPE)||LA176_6==KW_ENABLE||(LA176_6 >= KW_ENFORCED && LA176_6 <= KW_ESCAPED)||LA176_6==KW_EXCLUSIVE||(LA176_6 >= KW_EXPLAIN && LA176_6 <= KW_EXPRESSION)||(LA176_6 >= KW_FIELDS && LA176_6 <= KW_FIRST)||(LA176_6 >= KW_FORMAT && LA176_6 <= KW_FORMATTED)||LA176_6==KW_FUNCTIONS||(LA176_6 >= KW_HOUR && LA176_6 <= KW_IDXPROPERTIES)||(LA176_6 >= KW_INDEX && LA176_6 <= KW_INDEXES)||(LA176_6 >= KW_INPATH && LA176_6 <= KW_INPUTFORMAT)||(LA176_6 >= KW_ISOLATION && LA176_6 <= KW_JAR)||(LA176_6 >= KW_KEY && LA176_6 <= KW_LAST)||LA176_6==KW_LEVEL||(LA176_6 >= KW_LIMIT && LA176_6 <= KW_LOAD)||(LA176_6 >= KW_LOCATION && LA176_6 <= KW_LONG)||LA176_6==KW_MANAGEMENT||(LA176_6 >= KW_MAPJOIN && LA176_6 <= KW_MATERIALIZED)||LA176_6==KW_METADATA||(LA176_6 >= KW_MINUTE && LA176_6 <= KW_MONTH)||(LA176_6 >= KW_MOVE && LA176_6 <= KW_MSCK)||(LA176_6 >= KW_NORELY && LA176_6 <= KW_NOSCAN)||LA176_6==KW_NOVALIDATE||LA176_6==KW_NULLS||LA176_6==KW_OFFSET||(LA176_6 >= KW_OPERATOR && LA176_6 <= KW_OPTION)||(LA176_6 >= KW_OUTPUTDRIVER && LA176_6 <= KW_OUTPUTFORMAT)||(LA176_6 >= KW_OVERWRITE && LA176_6 <= KW_OWNER)||(LA176_6 >= KW_PARTITIONED && LA176_6 <= KW_PATH)||(LA176_6 >= KW_PLAN && LA176_6 <= KW_POOL)||LA176_6==KW_PRINCIPALS||(LA176_6 >= KW_PURGE && LA176_6 <= KW_QUERY_PARALLELISM)||LA176_6==KW_READ||(LA176_6 >= KW_REBUILD && LA176_6 <= KW_RECORDWRITER)||(LA176_6 >= KW_RELOAD && LA176_6 <= KW_RESTRICT)||LA176_6==KW_REWRITE||(LA176_6 >= KW_ROLE && LA176_6 <= KW_ROLES)||(LA176_6 >= KW_SCHEDULING_POLICY && LA176_6 <= KW_SECOND)||(LA176_6 >= KW_SEMI && LA176_6 <= KW_SERVER)||(LA176_6 >= KW_SETS && LA176_6 <= KW_SKEWED)||(LA176_6 >= KW_SNAPSHOT && LA176_6 <= KW_SSL)||(LA176_6 >= KW_STATISTICS && LA176_6 <= KW_SUMMARY)||LA176_6==KW_TABLES||(LA176_6 >= KW_TBLPROPERTIES && LA176_6 <= KW_TERMINATED)||LA176_6==KW_TINYINT||(LA176_6 >= KW_TOUCH && LA176_6 <= KW_TRANSACTIONS)||LA176_6==KW_UNARCHIVE||LA176_6==KW_UNDO||LA176_6==KW_UNIONTYPE||(LA176_6 >= KW_UNLOCK && LA176_6 <= KW_UNSIGNED)||(LA176_6 >= KW_URI && LA176_6 <= KW_USE)||(LA176_6 >= KW_UTC && LA176_6 <= KW_VALIDATE)||LA176_6==KW_VALUE_TYPE||(LA176_6 >= KW_VECTORIZATION && LA176_6 <= KW_WEEK)||LA176_6==KW_WHILE||(LA176_6 >= KW_WORK && LA176_6 <= KW_ZONE)||LA176_6==KW_BATCH||LA176_6==KW_DAYOFWEEK||LA176_6==KW_HOLD_DDLTIME||LA176_6==KW_IGNORE||LA176_6==KW_NO_DROP||LA176_6==KW_OFFLINE||LA176_6==KW_PROTECTION||LA176_6==KW_READONLY||LA176_6==KW_TIMESTAMPTZ) ) {
					alt176=4;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 176, 6, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 176, 0, input);
				throw nvae;
			}
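			// Editorial note (not generated output): the lookahead switch above picks
			// among the four privObject alternatives; when LA(1) is an ambiguous
			// keyword such as KW_SCHEMA, KW_URI, or KW_SERVER, LA(2) decides whether
			// it begins the keyword form or merely an identifier naming a table.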
			switch (alt176) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1740:7: ( KW_DATABASE | KW_SCHEMA ) identifier
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1740:7: ( KW_DATABASE | KW_SCHEMA )
					int alt173=2;
					int LA173_0 = input.LA(1);
					if ( (LA173_0==KW_DATABASE) ) {
						alt173=1;
					}
					else if ( (LA173_0==KW_SCHEMA) ) {
						alt173=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						NoViableAltException nvae =
							new NoViableAltException("", 173, 0, input);
						throw nvae;
					}

					switch (alt173) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1740:8: KW_DATABASE
							{
							KW_DATABASE576=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_privObject9452); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_DATABASE.add(KW_DATABASE576);

							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1740:20: KW_SCHEMA
							{
							KW_SCHEMA577=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_privObject9454); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_SCHEMA.add(KW_SCHEMA577);

							}
							break;

					}

					pushFollow(FOLLOW_identifier_in_privObject9457);
					identifier578=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(identifier578.getTree());
					// AST REWRITE
					// elements: identifier
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1740:42: -> ^( TOK_DB_TYPE identifier )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1740:45: ^( TOK_DB_TYPE identifier )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DB_TYPE, "TOK_DB_TYPE"), root_1);
						adaptor.addChild(root_1, stream_identifier.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:7: ( KW_TABLE )? tableName ( partitionSpec )?
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:7: ( KW_TABLE )?
					int alt174=2;
					int LA174_0 = input.LA(1);
					if ( (LA174_0==KW_TABLE) ) {
						alt174=1;
					}
					switch (alt174) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:7: KW_TABLE
							{
							KW_TABLE579=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_privObject9473); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE579);

							}
							break;

					}

					pushFollow(FOLLOW_tableName_in_privObject9476);
					tableName580=tableName();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableName.add(tableName580.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:27: ( partitionSpec )?
					int alt175=2;
					int LA175_0 = input.LA(1);
					if ( (LA175_0==KW_PARTITION) ) {
						alt175=1;
					}
					switch (alt175) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:27: partitionSpec
							{
							pushFollow(FOLLOW_partitionSpec_in_privObject9478);
							partitionSpec581=partitionSpec();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_partitionSpec.add(partitionSpec581.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: tableName, partitionSpec
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1741:42: -> ^( TOK_TABLE_TYPE tableName ( partitionSpec )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:45: ^( TOK_TABLE_TYPE tableName ( partitionSpec )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLE_TYPE, "TOK_TABLE_TYPE"), root_1);
						adaptor.addChild(root_1, stream_tableName.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:72: ( partitionSpec )?
						if ( stream_partitionSpec.hasNext() ) {
							adaptor.addChild(root_1, stream_partitionSpec.nextTree());
						}
						stream_partitionSpec.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1742:7: KW_URI (path= StringLiteral )
					{
					KW_URI582=(Token)match(input,KW_URI,FOLLOW_KW_URI_in_privObject9498); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_URI.add(KW_URI582);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1742:14: (path= StringLiteral )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1742:15: path= StringLiteral
					{
					path=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_privObject9503); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(path);

					}

					// AST REWRITE
					// elements: path
					// token labels: path
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleTokenStream stream_path=new RewriteRuleTokenStream(adaptor,"token path",path);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1742:35: -> ^( TOK_URI_TYPE $path)
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1742:39: ^( TOK_URI_TYPE $path)
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_URI_TYPE, "TOK_URI_TYPE"), root_1);
						adaptor.addChild(root_1, stream_path.nextNode());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1743:7: KW_SERVER identifier
					{
					KW_SERVER583=(Token)match(input,KW_SERVER,FOLLOW_KW_SERVER_in_privObject9522); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SERVER.add(KW_SERVER583);

					pushFollow(FOLLOW_identifier_in_privObject9524);
					identifier584=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(identifier584.getTree());
					// AST REWRITE
					// elements: identifier
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1743:28: -> ^( TOK_SERVER_TYPE identifier )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1743:31: ^( TOK_SERVER_TYPE identifier )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SERVER_TYPE, "TOK_SERVER_TYPE"), root_1);
						adaptor.addChild(root_1, stream_identifier.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup to perform unconditionally before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "privObject"


	public static class privObjectCols_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "privObjectCols"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1746:1: privObjectCols : ( ( KW_DATABASE | KW_SCHEMA ) identifier -> ^( TOK_DB_TYPE identifier ) | ( KW_TABLE )? tableName ( LPAREN cols= columnNameList RPAREN )? ( partitionSpec )? -> ^( TOK_TABLE_TYPE tableName ( $cols)? ( partitionSpec )? ) | KW_URI (path= StringLiteral ) -> ^( TOK_URI_TYPE $path) | KW_SERVER identifier -> ^( TOK_SERVER_TYPE identifier ) );
	public final HiveParser.privObjectCols_return privObjectCols() throws RecognitionException {
		HiveParser.privObjectCols_return retval = new HiveParser.privObjectCols_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token path=null;
		Token KW_DATABASE585=null;
		Token KW_SCHEMA586=null;
		Token KW_TABLE588=null;
		Token LPAREN590=null;
		Token RPAREN591=null;
		Token KW_URI593=null;
		Token KW_SERVER594=null;
		ParserRuleReturnScope cols =null;
		ParserRuleReturnScope identifier587 =null;
		ParserRuleReturnScope tableName589 =null;
		ParserRuleReturnScope partitionSpec592 =null;
		ParserRuleReturnScope identifier595 =null;

		ASTNode path_tree=null;
		ASTNode KW_DATABASE585_tree=null;
		ASTNode KW_SCHEMA586_tree=null;
		ASTNode KW_TABLE588_tree=null;
		ASTNode LPAREN590_tree=null;
		ASTNode RPAREN591_tree=null;
		ASTNode KW_URI593_tree=null;
		ASTNode KW_SERVER594_tree=null;
		RewriteRuleTokenStream stream_KW_SERVER=new RewriteRuleTokenStream(adaptor,"token KW_SERVER");
		RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
		RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_KW_URI=new RewriteRuleTokenStream(adaptor,"token KW_URI");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_columnNameList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameList");
		RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1747:5: ( ( KW_DATABASE | KW_SCHEMA ) identifier -> ^( TOK_DB_TYPE identifier ) | ( KW_TABLE )? tableName ( LPAREN cols= columnNameList RPAREN )? ( partitionSpec )? -> ^( TOK_TABLE_TYPE tableName ( $cols)? ( partitionSpec )? ) | KW_URI (path= StringLiteral ) -> ^( TOK_URI_TYPE $path) | KW_SERVER identifier -> ^( TOK_SERVER_TYPE identifier ) )
			int alt181=4;
			switch ( input.LA(1) ) {
			case KW_DATABASE:
				{
				alt181=1;
				}
				break;
			case KW_SCHEMA:
				{
				int LA181_2 = input.LA(2);
				if ( (LA181_2==Identifier||(LA181_2 >= KW_ABORT && LA181_2 <= KW_AFTER)||LA181_2==KW_ALLOC_FRACTION||LA181_2==KW_ANALYZE||LA181_2==KW_ARCHIVE||LA181_2==KW_ASC||(LA181_2 >= KW_AUTOCOMMIT && LA181_2 <= KW_BEFORE)||(LA181_2 >= KW_BUCKET && LA181_2 <= KW_BUCKETS)||(LA181_2 >= KW_CACHE && LA181_2 <= KW_CASCADE)||LA181_2==KW_CHANGE||(LA181_2 >= KW_CHECK && LA181_2 <= KW_COLLECTION)||(LA181_2 >= KW_COLUMNS && LA181_2 <= KW_COMMENT)||(LA181_2 >= KW_COMPACT && LA181_2 <= KW_CONCATENATE)||LA181_2==KW_CONTINUE||LA181_2==KW_DATA||LA181_2==KW_DATABASES||(LA181_2 >= KW_DATETIME && LA181_2 <= KW_DBPROPERTIES)||(LA181_2 >= KW_DEFAULT && LA181_2 <= KW_DEFINED)||(LA181_2 >= KW_DELIMITED && LA181_2 <= KW_DESC)||(LA181_2 >= KW_DETAIL && LA181_2 <= KW_DISABLE)||(LA181_2 >= KW_DISTRIBUTE && LA181_2 <= KW_DO)||LA181_2==KW_DOW||(LA181_2 >= KW_DUMP && LA181_2 <= KW_ELEM_TYPE)||LA181_2==KW_ENABLE||(LA181_2 >= KW_ENFORCED && LA181_2 <= KW_ESCAPED)||LA181_2==KW_EXCLUSIVE||(LA181_2 >= KW_EXPLAIN && LA181_2 <= KW_EXPRESSION)||(LA181_2 >= KW_FIELDS && LA181_2 <= KW_FIRST)||(LA181_2 >= KW_FORMAT && LA181_2 <= KW_FORMATTED)||LA181_2==KW_FUNCTIONS||(LA181_2 >= KW_HOUR && LA181_2 <= KW_IDXPROPERTIES)||(LA181_2 >= KW_INDEX && LA181_2 <= KW_INDEXES)||(LA181_2 >= KW_INPATH && LA181_2 <= KW_INPUTFORMAT)||(LA181_2 >= KW_ISOLATION && LA181_2 <= KW_JAR)||(LA181_2 >= KW_KEY && LA181_2 <= KW_LAST)||LA181_2==KW_LEVEL||(LA181_2 >= KW_LIMIT && LA181_2 <= KW_LOAD)||(LA181_2 >= KW_LOCATION && LA181_2 <= KW_LONG)||LA181_2==KW_MANAGEMENT||(LA181_2 >= KW_MAPJOIN && LA181_2 <= KW_MATERIALIZED)||LA181_2==KW_METADATA||(LA181_2 >= KW_MINUTE && LA181_2 <= KW_MONTH)||(LA181_2 >= KW_MOVE && LA181_2 <= KW_MSCK)||(LA181_2 >= KW_NORELY && LA181_2 <= KW_NOSCAN)||LA181_2==KW_NOVALIDATE||LA181_2==KW_NULLS||LA181_2==KW_OFFSET||(LA181_2 >= KW_OPERATOR && LA181_2 <= KW_OPTION)||(LA181_2 >= KW_OUTPUTDRIVER && LA181_2 <= KW_OUTPUTFORMAT)||(LA181_2 >= KW_OVERWRITE && LA181_2 <= KW_OWNER)||(LA181_2 >= KW_PARTITIONED && LA181_2 <= KW_PATH)||(LA181_2 >= KW_PLAN && LA181_2 <= KW_POOL)||LA181_2==KW_PRINCIPALS||(LA181_2 >= KW_PURGE && LA181_2 <= KW_QUERY_PARALLELISM)||LA181_2==KW_READ||(LA181_2 >= KW_REBUILD && LA181_2 <= KW_RECORDWRITER)||(LA181_2 >= KW_RELOAD && LA181_2 <= KW_RESTRICT)||LA181_2==KW_REWRITE||(LA181_2 >= KW_ROLE && LA181_2 <= KW_ROLES)||(LA181_2 >= KW_SCHEDULING_POLICY && LA181_2 <= KW_SECOND)||(LA181_2 >= KW_SEMI && LA181_2 <= KW_SERVER)||(LA181_2 >= KW_SETS && LA181_2 <= KW_SKEWED)||(LA181_2 >= KW_SNAPSHOT && LA181_2 <= KW_SSL)||(LA181_2 >= KW_STATISTICS && LA181_2 <= KW_SUMMARY)||LA181_2==KW_TABLES||(LA181_2 >= KW_TBLPROPERTIES && LA181_2 <= KW_TERMINATED)||LA181_2==KW_TINYINT||(LA181_2 >= KW_TOUCH && LA181_2 <= KW_TRANSACTIONS)||LA181_2==KW_UNARCHIVE||LA181_2==KW_UNDO||LA181_2==KW_UNIONTYPE||(LA181_2 >= KW_UNLOCK && LA181_2 <= KW_UNSIGNED)||(LA181_2 >= KW_URI && LA181_2 <= KW_USE)||(LA181_2 >= KW_UTC && LA181_2 <= KW_VALIDATE)||LA181_2==KW_VALUE_TYPE||(LA181_2 >= KW_VECTORIZATION && LA181_2 <= KW_WEEK)||LA181_2==KW_WHILE||(LA181_2 >= KW_WORK && LA181_2 <= KW_ZONE)||LA181_2==KW_BATCH||LA181_2==KW_DAYOFWEEK||LA181_2==KW_HOLD_DDLTIME||LA181_2==KW_IGNORE||LA181_2==KW_NO_DROP||LA181_2==KW_OFFLINE||LA181_2==KW_PROTECTION||LA181_2==KW_READONLY||LA181_2==KW_TIMESTAMPTZ) ) {
					alt181=1;
				}
				else if ( (LA181_2==EOF||LA181_2==DOT||LA181_2==KW_PARTITION||LA181_2==LPAREN) ) {
					alt181=2;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 181, 2, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case Identifier:
			case KW_ABORT:
			case KW_ACTIVATE:
			case KW_ACTIVE:
			case KW_ADD:
			case KW_ADMIN:
			case KW_AFTER:
			case KW_ALLOC_FRACTION:
			case KW_ANALYZE:
			case KW_ARCHIVE:
			case KW_ASC:
			case KW_AUTOCOMMIT:
			case KW_BEFORE:
			case KW_BUCKET:
			case KW_BUCKETS:
			case KW_CACHE:
			case KW_CASCADE:
			case KW_CHANGE:
			case KW_CHECK:
			case KW_CLUSTER:
			case KW_CLUSTERED:
			case KW_CLUSTERSTATUS:
			case KW_COLLECTION:
			case KW_COLUMNS:
			case KW_COMMENT:
			case KW_COMPACT:
			case KW_COMPACTIONS:
			case KW_COMPUTE:
			case KW_CONCATENATE:
			case KW_CONTINUE:
			case KW_DATA:
			case KW_DATABASES:
			case KW_DATETIME:
			case KW_DAY:
			case KW_DBPROPERTIES:
			case KW_DEFAULT:
			case KW_DEFERRED:
			case KW_DEFINED:
			case KW_DELIMITED:
			case KW_DEPENDENCY:
			case KW_DESC:
			case KW_DETAIL:
			case KW_DIRECTORIES:
			case KW_DIRECTORY:
			case KW_DISABLE:
			case KW_DISTRIBUTE:
			case KW_DO:
			case KW_DOW:
			case KW_DUMP:
			case KW_ELEM_TYPE:
			case KW_ENABLE:
			case KW_ENFORCED:
			case KW_ESCAPED:
			case KW_EXCLUSIVE:
			case KW_EXPLAIN:
			case KW_EXPORT:
			case KW_EXPRESSION:
			case KW_FIELDS:
			case KW_FILE:
			case KW_FILEFORMAT:
			case KW_FIRST:
			case KW_FORMAT:
			case KW_FORMATTED:
			case KW_FUNCTIONS:
			case KW_HOUR:
			case KW_IDXPROPERTIES:
			case KW_INDEX:
			case KW_INDEXES:
			case KW_INPATH:
			case KW_INPUTDRIVER:
			case KW_INPUTFORMAT:
			case KW_ISOLATION:
			case KW_ITEMS:
			case KW_JAR:
			case KW_KEY:
			case KW_KEYS:
			case KW_KEY_TYPE:
			case KW_KILL:
			case KW_LAST:
			case KW_LEVEL:
			case KW_LIMIT:
			case KW_LINES:
			case KW_LOAD:
			case KW_LOCATION:
			case KW_LOCK:
			case KW_LOCKS:
			case KW_LOGICAL:
			case KW_LONG:
			case KW_MANAGEMENT:
			case KW_MAPJOIN:
			case KW_MAPPING:
			case KW_MATCHED:
			case KW_MATERIALIZED:
			case KW_METADATA:
			case KW_MINUTE:
			case KW_MONTH:
			case KW_MOVE:
			case KW_MSCK:
			case KW_NORELY:
			case KW_NOSCAN:
			case KW_NOVALIDATE:
			case KW_NULLS:
			case KW_OFFSET:
			case KW_OPERATOR:
			case KW_OPTION:
			case KW_OUTPUTDRIVER:
			case KW_OUTPUTFORMAT:
			case KW_OVERWRITE:
			case KW_OWNER:
			case KW_PARTITIONED:
			case KW_PARTITIONS:
			case KW_PATH:
			case KW_PLAN:
			case KW_PLANS:
			case KW_PLUS:
			case KW_POOL:
			case KW_PRINCIPALS:
			case KW_PURGE:
			case KW_QUARTER:
			case KW_QUERY:
			case KW_QUERY_PARALLELISM:
			case KW_READ:
			case KW_REBUILD:
			case KW_RECORDREADER:
			case KW_RECORDWRITER:
			case KW_RELOAD:
			case KW_RELY:
			case KW_RENAME:
			case KW_REOPTIMIZATION:
			case KW_REPAIR:
			case KW_REPL:
			case KW_REPLACE:
			case KW_REPLICATION:
			case KW_RESOURCE:
			case KW_RESTRICT:
			case KW_REWRITE:
			case KW_ROLE:
			case KW_ROLES:
			case KW_SCHEDULING_POLICY:
			case KW_SCHEMAS:
			case KW_SECOND:
			case KW_SEMI:
			case KW_SERDE:
			case KW_SERDEPROPERTIES:
			case KW_SETS:
			case KW_SHARED:
			case KW_SHOW:
			case KW_SHOW_DATABASE:
			case KW_SKEWED:
			case KW_SNAPSHOT:
			case KW_SORT:
			case KW_SORTED:
			case KW_SSL:
			case KW_STATISTICS:
			case KW_STATUS:
			case KW_STORED:
			case KW_STREAMTABLE:
			case KW_STRING:
			case KW_STRUCT:
			case KW_SUMMARY:
			case KW_TABLE:
			case KW_TABLES:
			case KW_TBLPROPERTIES:
			case KW_TEMPORARY:
			case KW_TERMINATED:
			case KW_TINYINT:
			case KW_TOUCH:
			case KW_TRANSACTION:
			case KW_TRANSACTIONS:
			case KW_UNARCHIVE:
			case KW_UNDO:
			case KW_UNIONTYPE:
			case KW_UNLOCK:
			case KW_UNMANAGED:
			case KW_UNSET:
			case KW_UNSIGNED:
			case KW_USE:
			case KW_UTC:
			case KW_UTCTIMESTAMP:
			case KW_VALIDATE:
			case KW_VALUE_TYPE:
			case KW_VECTORIZATION:
			case KW_VIEW:
			case KW_VIEWS:
			case KW_WAIT:
			case KW_WEEK:
			case KW_WHILE:
			case KW_WORK:
			case KW_WORKLOAD:
			case KW_WRITE:
			case KW_YEAR:
			case KW_ZONE:
			case KW_BATCH:
			case KW_DAYOFWEEK:
			case KW_HOLD_DDLTIME:
			case KW_IGNORE:
			case KW_NO_DROP:
			case KW_OFFLINE:
			case KW_PROTECTION:
			case KW_READONLY:
			case KW_TIMESTAMPTZ:
				{
				alt181=2;
				}
				break;
			case KW_URI:
				{
				int LA181_5 = input.LA(2);
				if ( (LA181_5==EOF||LA181_5==DOT||LA181_5==KW_PARTITION||LA181_5==LPAREN) ) {
					alt181=2;
				}
				else if ( (LA181_5==StringLiteral) ) {
					alt181=3;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 181, 5, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_SERVER:
				{
				int LA181_6 = input.LA(2);
				if ( (LA181_6==EOF||LA181_6==DOT||LA181_6==KW_PARTITION||LA181_6==LPAREN) ) {
					alt181=2;
				}
				else if ( (LA181_6==Identifier||(LA181_6 >= KW_ABORT && LA181_6 <= KW_AFTER)||LA181_6==KW_ALLOC_FRACTION||LA181_6==KW_ANALYZE||LA181_6==KW_ARCHIVE||LA181_6==KW_ASC||(LA181_6 >= KW_AUTOCOMMIT && LA181_6 <= KW_BEFORE)||(LA181_6 >= KW_BUCKET && LA181_6 <= KW_BUCKETS)||(LA181_6 >= KW_CACHE && LA181_6 <= KW_CASCADE)||LA181_6==KW_CHANGE||(LA181_6 >= KW_CHECK && LA181_6 <= KW_COLLECTION)||(LA181_6 >= KW_COLUMNS && LA181_6 <= KW_COMMENT)||(LA181_6 >= KW_COMPACT && LA181_6 <= KW_CONCATENATE)||LA181_6==KW_CONTINUE||LA181_6==KW_DATA||LA181_6==KW_DATABASES||(LA181_6 >= KW_DATETIME && LA181_6 <= KW_DBPROPERTIES)||(LA181_6 >= KW_DEFAULT && LA181_6 <= KW_DEFINED)||(LA181_6 >= KW_DELIMITED && LA181_6 <= KW_DESC)||(LA181_6 >= KW_DETAIL && LA181_6 <= KW_DISABLE)||(LA181_6 >= KW_DISTRIBUTE && LA181_6 <= KW_DO)||LA181_6==KW_DOW||(LA181_6 >= KW_DUMP && LA181_6 <= KW_ELEM_TYPE)||LA181_6==KW_ENABLE||(LA181_6 >= KW_ENFORCED && LA181_6 <= KW_ESCAPED)||LA181_6==KW_EXCLUSIVE||(LA181_6 >= KW_EXPLAIN && LA181_6 <= KW_EXPRESSION)||(LA181_6 >= KW_FIELDS && LA181_6 <= KW_FIRST)||(LA181_6 >= KW_FORMAT && LA181_6 <= KW_FORMATTED)||LA181_6==KW_FUNCTIONS||(LA181_6 >= KW_HOUR && LA181_6 <= KW_IDXPROPERTIES)||(LA181_6 >= KW_INDEX && LA181_6 <= KW_INDEXES)||(LA181_6 >= KW_INPATH && LA181_6 <= KW_INPUTFORMAT)||(LA181_6 >= KW_ISOLATION && LA181_6 <= KW_JAR)||(LA181_6 >= KW_KEY && LA181_6 <= KW_LAST)||LA181_6==KW_LEVEL||(LA181_6 >= KW_LIMIT && LA181_6 <= KW_LOAD)||(LA181_6 >= KW_LOCATION && LA181_6 <= KW_LONG)||LA181_6==KW_MANAGEMENT||(LA181_6 >= KW_MAPJOIN && LA181_6 <= KW_MATERIALIZED)||LA181_6==KW_METADATA||(LA181_6 >= KW_MINUTE && LA181_6 <= KW_MONTH)||(LA181_6 >= KW_MOVE && LA181_6 <= KW_MSCK)||(LA181_6 >= KW_NORELY && LA181_6 <= KW_NOSCAN)||LA181_6==KW_NOVALIDATE||LA181_6==KW_NULLS||LA181_6==KW_OFFSET||(LA181_6 >= KW_OPERATOR && LA181_6 <= KW_OPTION)||(LA181_6 >= KW_OUTPUTDRIVER && LA181_6 <= KW_OUTPUTFORMAT)||(LA181_6 >= KW_OVERWRITE && LA181_6 <= KW_OWNER)||(LA181_6 >= KW_PARTITIONED && LA181_6 <= KW_PATH)||(LA181_6 >= KW_PLAN && LA181_6 <= KW_POOL)||LA181_6==KW_PRINCIPALS||(LA181_6 >= KW_PURGE && LA181_6 <= KW_QUERY_PARALLELISM)||LA181_6==KW_READ||(LA181_6 >= KW_REBUILD && LA181_6 <= KW_RECORDWRITER)||(LA181_6 >= KW_RELOAD && LA181_6 <= KW_RESTRICT)||LA181_6==KW_REWRITE||(LA181_6 >= KW_ROLE && LA181_6 <= KW_ROLES)||(LA181_6 >= KW_SCHEDULING_POLICY && LA181_6 <= KW_SECOND)||(LA181_6 >= KW_SEMI && LA181_6 <= KW_SERVER)||(LA181_6 >= KW_SETS && LA181_6 <= KW_SKEWED)||(LA181_6 >= KW_SNAPSHOT && LA181_6 <= KW_SSL)||(LA181_6 >= KW_STATISTICS && LA181_6 <= KW_SUMMARY)||LA181_6==KW_TABLES||(LA181_6 >= KW_TBLPROPERTIES && LA181_6 <= KW_TERMINATED)||LA181_6==KW_TINYINT||(LA181_6 >= KW_TOUCH && LA181_6 <= KW_TRANSACTIONS)||LA181_6==KW_UNARCHIVE||LA181_6==KW_UNDO||LA181_6==KW_UNIONTYPE||(LA181_6 >= KW_UNLOCK && LA181_6 <= KW_UNSIGNED)||(LA181_6 >= KW_URI && LA181_6 <= KW_USE)||(LA181_6 >= KW_UTC && LA181_6 <= KW_VALIDATE)||LA181_6==KW_VALUE_TYPE||(LA181_6 >= KW_VECTORIZATION && LA181_6 <= KW_WEEK)||LA181_6==KW_WHILE||(LA181_6 >= KW_WORK && LA181_6 <= KW_ZONE)||LA181_6==KW_BATCH||LA181_6==KW_DAYOFWEEK||LA181_6==KW_HOLD_DDLTIME||LA181_6==KW_IGNORE||LA181_6==KW_NO_DROP||LA181_6==KW_OFFLINE||LA181_6==KW_PROTECTION||LA181_6==KW_READONLY||LA181_6==KW_TIMESTAMPTZ) ) {
					alt181=4;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 181, 6, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 181, 0, input);
				throw nvae;
			}
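			// Editorial note (not generated output): as in privObject, LA(2)
			// disambiguates KW_SCHEMA/KW_URI/KW_SERVER between their keyword role and
			// their use as a plain identifier; here EOF and LPAREN are also valid
			// followers because privObjectCols may end the statement or be followed
			// by a parenthesized column list.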
			switch (alt181) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1747:7: ( KW_DATABASE | KW_SCHEMA ) identifier
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1747:7: ( KW_DATABASE | KW_SCHEMA )
					int alt177=2;
					int LA177_0 = input.LA(1);
					if ( (LA177_0==KW_DATABASE) ) {
						alt177=1;
					}
					else if ( (LA177_0==KW_SCHEMA) ) {
						alt177=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						NoViableAltException nvae =
							new NoViableAltException("", 177, 0, input);
						throw nvae;
					}

					switch (alt177) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1747:8: KW_DATABASE
							{
							KW_DATABASE585=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_privObjectCols9550); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_DATABASE.add(KW_DATABASE585);

							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1747:20: KW_SCHEMA
							{
							KW_SCHEMA586=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_privObjectCols9552); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_SCHEMA.add(KW_SCHEMA586);

							}
							break;

					}

					pushFollow(FOLLOW_identifier_in_privObjectCols9555);
					identifier587=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(identifier587.getTree());
					// AST REWRITE
					// elements: identifier
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1747:42: -> ^( TOK_DB_TYPE identifier )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1747:45: ^( TOK_DB_TYPE identifier )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DB_TYPE, "TOK_DB_TYPE"), root_1);
						adaptor.addChild(root_1, stream_identifier.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1748:7: ( KW_TABLE )? tableName ( LPAREN cols= columnNameList RPAREN )? ( partitionSpec )?
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1748:7: ( KW_TABLE )?
					int alt178=2;
					int LA178_0 = input.LA(1);
					if ( (LA178_0==KW_TABLE) ) {
						alt178=1;
					}
					switch (alt178) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1748:7: KW_TABLE
							{
							KW_TABLE588=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_privObjectCols9571); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE588);

							}
							break;

					}

					pushFollow(FOLLOW_tableName_in_privObjectCols9574);
					tableName589=tableName();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableName.add(tableName589.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1748:27: ( LPAREN cols= columnNameList RPAREN )?
					int alt179=2;
					int LA179_0 = input.LA(1);
					if ( (LA179_0==LPAREN) ) {
						alt179=1;
					}
					switch (alt179) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1748:28: LPAREN cols= columnNameList RPAREN
							{
							LPAREN590=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_privObjectCols9577); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN590);

							pushFollow(FOLLOW_columnNameList_in_privObjectCols9581);
							cols=columnNameList();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_columnNameList.add(cols.getTree());
							RPAREN591=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_privObjectCols9583); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN591);

							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1748:64: ( partitionSpec )?
					int alt180=2;
					int LA180_0 = input.LA(1);
					if ( (LA180_0==KW_PARTITION) ) {
						alt180=1;
					}
					switch (alt180) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1748:64: partitionSpec
							{
							pushFollow(FOLLOW_partitionSpec_in_privObjectCols9587);
							partitionSpec592=partitionSpec();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_partitionSpec.add(partitionSpec592.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: tableName, cols, partitionSpec
					// token labels: 
					// rule labels: cols, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_cols=new RewriteRuleSubtreeStream(adaptor,"rule cols",cols!=null?cols.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1748:79: -> ^( TOK_TABLE_TYPE tableName ( $cols)? ( partitionSpec )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1748:82: ^( TOK_TABLE_TYPE tableName ( $cols)? ( partitionSpec )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLE_TYPE, "TOK_TABLE_TYPE"), root_1);
						adaptor.addChild(root_1, stream_tableName.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1748:110: ( $cols)?
						if ( stream_cols.hasNext() ) {
							adaptor.addChild(root_1, stream_cols.nextTree());
						}
						stream_cols.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1748:116: ( partitionSpec )?
						if ( stream_partitionSpec.hasNext() ) {
							adaptor.addChild(root_1, stream_partitionSpec.nextTree());
						}
						stream_partitionSpec.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1749:7: KW_URI (path= StringLiteral )
					{
					KW_URI593=(Token)match(input,KW_URI,FOLLOW_KW_URI_in_privObjectCols9611); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_URI.add(KW_URI593);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1749:14: (path= StringLiteral )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1749:15: path= StringLiteral
					{
					path=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_privObjectCols9616); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(path);

					}

					// AST REWRITE
					// elements: path
					// token labels: path
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleTokenStream stream_path=new RewriteRuleTokenStream(adaptor,"token path",path);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1749:35: -> ^( TOK_URI_TYPE $path)
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1749:39: ^( TOK_URI_TYPE $path)
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_URI_TYPE, "TOK_URI_TYPE"), root_1);
						adaptor.addChild(root_1, stream_path.nextNode());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1750:7: KW_SERVER identifier
					{
					KW_SERVER594=(Token)match(input,KW_SERVER,FOLLOW_KW_SERVER_in_privObjectCols9635); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SERVER.add(KW_SERVER594);

					pushFollow(FOLLOW_identifier_in_privObjectCols9637);
					identifier595=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(identifier595.getTree());
					// AST REWRITE
					// elements: identifier
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1750:28: -> ^( TOK_SERVER_TYPE identifier )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1750:31: ^( TOK_SERVER_TYPE identifier )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SERVER_TYPE, "TOK_SERVER_TYPE"), root_1);
						adaptor.addChild(root_1, stream_identifier.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup to perform unconditionally before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "privObjectCols"


	public static class privilegeList_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "privilegeList"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1753:1: privilegeList : privlegeDef ( COMMA privlegeDef )* -> ^( TOK_PRIVILEGE_LIST ( privlegeDef )+ ) ;
	public final HiveParser.privilegeList_return privilegeList() throws RecognitionException {
		HiveParser.privilegeList_return retval = new HiveParser.privilegeList_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token COMMA597=null;
		ParserRuleReturnScope privlegeDef596 =null;
		ParserRuleReturnScope privlegeDef598 =null;

		ASTNode COMMA597_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleSubtreeStream stream_privlegeDef=new RewriteRuleSubtreeStream(adaptor,"rule privlegeDef");

		pushMsg("grant privilege list", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1756:5: ( privlegeDef ( COMMA privlegeDef )* -> ^( TOK_PRIVILEGE_LIST ( privlegeDef )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1756:7: privlegeDef ( COMMA privlegeDef )*
			{
			pushFollow(FOLLOW_privlegeDef_in_privilegeList9672);
			privlegeDef596=privlegeDef();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_privlegeDef.add(privlegeDef596.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1756:19: ( COMMA privlegeDef )*
			loop182:
			while (true) {
				int alt182=2;
				int LA182_0 = input.LA(1);
				if ( (LA182_0==COMMA) ) {
					alt182=1;
				}

				switch (alt182) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1756:20: COMMA privlegeDef
					{
					COMMA597=(Token)match(input,COMMA,FOLLOW_COMMA_in_privilegeList9675); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA597);

					pushFollow(FOLLOW_privlegeDef_in_privilegeList9677);
					privlegeDef598=privlegeDef();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_privlegeDef.add(privlegeDef598.getTree());
					}
					break;

				default :
					break loop182;
				}
			}

			// AST REWRITE
			// elements: privlegeDef
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1757:5: -> ^( TOK_PRIVILEGE_LIST ( privlegeDef )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1757:8: ^( TOK_PRIVILEGE_LIST ( privlegeDef )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_PRIVILEGE_LIST, "TOK_PRIVILEGE_LIST"), root_1);
				if ( !(stream_privlegeDef.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_privlegeDef.hasNext() ) {
					adaptor.addChild(root_1, stream_privlegeDef.nextTree());
				}
				stream_privlegeDef.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup to perform unconditionally before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "privilegeList"


	public static class privlegeDef_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "privlegeDef"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1760:1: privlegeDef : privilegeType ( LPAREN cols= columnNameList RPAREN )? -> ^( TOK_PRIVILEGE privilegeType ( $cols)? ) ;
	public final HiveParser.privlegeDef_return privlegeDef() throws RecognitionException {
		HiveParser.privlegeDef_return retval = new HiveParser.privlegeDef_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token LPAREN600=null;
		Token RPAREN601=null;
		ParserRuleReturnScope cols =null;
		ParserRuleReturnScope privilegeType599 =null;

		ASTNode LPAREN600_tree=null;
		ASTNode RPAREN601_tree=null;
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleSubtreeStream stream_columnNameList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameList");
		RewriteRuleSubtreeStream stream_privilegeType=new RewriteRuleSubtreeStream(adaptor,"rule privilegeType");

		pushMsg("grant privilege", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1763:5: ( privilegeType ( LPAREN cols= columnNameList RPAREN )? -> ^( TOK_PRIVILEGE privilegeType ( $cols)? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1763:7: privilegeType ( LPAREN cols= columnNameList RPAREN )?
			{
			pushFollow(FOLLOW_privilegeType_in_privlegeDef9719);
			privilegeType599=privilegeType();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_privilegeType.add(privilegeType599.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1763:21: ( LPAREN cols= columnNameList RPAREN )?
			int alt183=2;
			int LA183_0 = input.LA(1);
			if ( (LA183_0==LPAREN) ) {
				alt183=1;
			}
			switch (alt183) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1763:22: LPAREN cols= columnNameList RPAREN
					{
					LPAREN600=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_privlegeDef9722); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN600);

					pushFollow(FOLLOW_columnNameList_in_privlegeDef9726);
					cols=columnNameList();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_columnNameList.add(cols.getTree());
					RPAREN601=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_privlegeDef9728); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN601);

					}
					break;

			}

			// AST REWRITE
			// elements: privilegeType, cols
			// token labels: 
			// rule labels: cols, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_cols=new RewriteRuleSubtreeStream(adaptor,"rule cols",cols!=null?cols.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1764:5: -> ^( TOK_PRIVILEGE privilegeType ( $cols)? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1764:8: ^( TOK_PRIVILEGE privilegeType ( $cols)? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_PRIVILEGE, "TOK_PRIVILEGE"), root_1);
				adaptor.addChild(root_1, stream_privilegeType.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1764:39: ( $cols)?
				if ( stream_cols.hasNext() ) {
					adaptor.addChild(root_1, stream_cols.nextTree());
				}
				stream_cols.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup to perform unconditionally before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "privlegeDef"


	public static class privilegeType_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "privilegeType"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1767:1: privilegeType : ( KW_ALL -> ^( TOK_PRIV_ALL ) | KW_ALTER -> ^( TOK_PRIV_ALTER_METADATA ) | KW_UPDATE -> ^( TOK_PRIV_ALTER_DATA ) | KW_CREATE -> ^( TOK_PRIV_CREATE ) | KW_DROP -> ^( TOK_PRIV_DROP ) | KW_LOCK -> ^( TOK_PRIV_LOCK ) | KW_SELECT -> ^( TOK_PRIV_SELECT ) | KW_SHOW_DATABASE -> ^( TOK_PRIV_SHOW_DATABASE ) | KW_INSERT -> ^( TOK_PRIV_INSERT ) | KW_DELETE -> ^( TOK_PRIV_DELETE ) );
	public final HiveParser.privilegeType_return privilegeType() throws RecognitionException {
		HiveParser.privilegeType_return retval = new HiveParser.privilegeType_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ALL602=null;
		Token KW_ALTER603=null;
		Token KW_UPDATE604=null;
		Token KW_CREATE605=null;
		Token KW_DROP606=null;
		Token KW_LOCK607=null;
		Token KW_SELECT608=null;
		Token KW_SHOW_DATABASE609=null;
		Token KW_INSERT610=null;
		Token KW_DELETE611=null;

		ASTNode KW_ALL602_tree=null;
		ASTNode KW_ALTER603_tree=null;
		ASTNode KW_UPDATE604_tree=null;
		ASTNode KW_CREATE605_tree=null;
		ASTNode KW_DROP606_tree=null;
		ASTNode KW_LOCK607_tree=null;
		ASTNode KW_SELECT608_tree=null;
		ASTNode KW_SHOW_DATABASE609_tree=null;
		ASTNode KW_INSERT610_tree=null;
		ASTNode KW_DELETE611_tree=null;
		RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
		RewriteRuleTokenStream stream_KW_DELETE=new RewriteRuleTokenStream(adaptor,"token KW_DELETE");
		RewriteRuleTokenStream stream_KW_SHOW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_SHOW_DATABASE");
		RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
		RewriteRuleTokenStream stream_KW_ALTER=new RewriteRuleTokenStream(adaptor,"token KW_ALTER");
		RewriteRuleTokenStream stream_KW_UPDATE=new RewriteRuleTokenStream(adaptor,"token KW_UPDATE");
		RewriteRuleTokenStream stream_KW_LOCK=new RewriteRuleTokenStream(adaptor,"token KW_LOCK");
		RewriteRuleTokenStream stream_KW_INSERT=new RewriteRuleTokenStream(adaptor,"token KW_INSERT");
		RewriteRuleTokenStream stream_KW_SELECT=new RewriteRuleTokenStream(adaptor,"token KW_SELECT");
		RewriteRuleTokenStream stream_KW_ALL=new RewriteRuleTokenStream(adaptor,"token KW_ALL");

		pushMsg("privilege type", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1770:5: ( KW_ALL -> ^( TOK_PRIV_ALL ) | KW_ALTER -> ^( TOK_PRIV_ALTER_METADATA ) | KW_UPDATE -> ^( TOK_PRIV_ALTER_DATA ) | KW_CREATE -> ^( TOK_PRIV_CREATE ) | KW_DROP -> ^( TOK_PRIV_DROP ) | KW_LOCK -> ^( TOK_PRIV_LOCK ) | KW_SELECT -> ^( TOK_PRIV_SELECT ) | KW_SHOW_DATABASE -> ^( TOK_PRIV_SHOW_DATABASE ) | KW_INSERT -> ^( TOK_PRIV_INSERT ) | KW_DELETE -> ^( TOK_PRIV_DELETE ) )
			int alt184=10;
			switch ( input.LA(1) ) {
			case KW_ALL:
				{
				alt184=1;
				}
				break;
			case KW_ALTER:
				{
				alt184=2;
				}
				break;
			case KW_UPDATE:
				{
				alt184=3;
				}
				break;
			case KW_CREATE:
				{
				alt184=4;
				}
				break;
			case KW_DROP:
				{
				alt184=5;
				}
				break;
			case KW_LOCK:
				{
				alt184=6;
				}
				break;
			case KW_SELECT:
				{
				alt184=7;
				}
				break;
			case KW_SHOW_DATABASE:
				{
				alt184=8;
				}
				break;
			case KW_INSERT:
				{
				alt184=9;
				}
				break;
			case KW_DELETE:
				{
				alt184=10;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 184, 0, input);
				throw nvae;
			}
			switch (alt184) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1770:7: KW_ALL
					{
					KW_ALL602=(Token)match(input,KW_ALL,FOLLOW_KW_ALL_in_privilegeType9773); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ALL.add(KW_ALL602);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1770:14: -> ^( TOK_PRIV_ALL )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1770:17: ^( TOK_PRIV_ALL )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_PRIV_ALL, "TOK_PRIV_ALL"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1771:7: KW_ALTER
					{
					KW_ALTER603=(Token)match(input,KW_ALTER,FOLLOW_KW_ALTER_in_privilegeType9787); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ALTER.add(KW_ALTER603);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1771:16: -> ^( TOK_PRIV_ALTER_METADATA )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1771:19: ^( TOK_PRIV_ALTER_METADATA )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_PRIV_ALTER_METADATA, "TOK_PRIV_ALTER_METADATA"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1772:7: KW_UPDATE
					{
					KW_UPDATE604=(Token)match(input,KW_UPDATE,FOLLOW_KW_UPDATE_in_privilegeType9801); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_UPDATE.add(KW_UPDATE604);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1772:17: -> ^( TOK_PRIV_ALTER_DATA )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1772:20: ^( TOK_PRIV_ALTER_DATA )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_PRIV_ALTER_DATA, "TOK_PRIV_ALTER_DATA"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1773:7: KW_CREATE
					{
					KW_CREATE605=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_privilegeType9815); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_CREATE.add(KW_CREATE605);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1773:17: -> ^( TOK_PRIV_CREATE )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1773:20: ^( TOK_PRIV_CREATE )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_PRIV_CREATE, "TOK_PRIV_CREATE"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 5 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1774:7: KW_DROP
					{
					KW_DROP606=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_privilegeType9829); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DROP.add(KW_DROP606);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1774:15: -> ^( TOK_PRIV_DROP )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1774:18: ^( TOK_PRIV_DROP )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_PRIV_DROP, "TOK_PRIV_DROP"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 6 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1775:7: KW_LOCK
					{
					KW_LOCK607=(Token)match(input,KW_LOCK,FOLLOW_KW_LOCK_in_privilegeType9843); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_LOCK.add(KW_LOCK607);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1775:15: -> ^( TOK_PRIV_LOCK )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1775:18: ^( TOK_PRIV_LOCK )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_PRIV_LOCK, "TOK_PRIV_LOCK"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 7 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1776:7: KW_SELECT
					{
					KW_SELECT608=(Token)match(input,KW_SELECT,FOLLOW_KW_SELECT_in_privilegeType9857); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SELECT.add(KW_SELECT608);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1776:17: -> ^( TOK_PRIV_SELECT )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1776:20: ^( TOK_PRIV_SELECT )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_PRIV_SELECT, "TOK_PRIV_SELECT"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 8 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1777:7: KW_SHOW_DATABASE
					{
					KW_SHOW_DATABASE609=(Token)match(input,KW_SHOW_DATABASE,FOLLOW_KW_SHOW_DATABASE_in_privilegeType9871); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SHOW_DATABASE.add(KW_SHOW_DATABASE609);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1777:24: -> ^( TOK_PRIV_SHOW_DATABASE )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1777:27: ^( TOK_PRIV_SHOW_DATABASE )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_PRIV_SHOW_DATABASE, "TOK_PRIV_SHOW_DATABASE"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 9 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1778:7: KW_INSERT
					{
					KW_INSERT610=(Token)match(input,KW_INSERT,FOLLOW_KW_INSERT_in_privilegeType9885); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_INSERT.add(KW_INSERT610);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1778:17: -> ^( TOK_PRIV_INSERT )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1778:20: ^( TOK_PRIV_INSERT )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_PRIV_INSERT, "TOK_PRIV_INSERT"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 10 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1779:7: KW_DELETE
					{
					KW_DELETE611=(Token)match(input,KW_DELETE,FOLLOW_KW_DELETE_in_privilegeType9899); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DELETE.add(KW_DELETE611);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1779:17: -> ^( TOK_PRIV_DELETE )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1779:20: ^( TOK_PRIV_DELETE )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_PRIV_DELETE, "TOK_PRIV_DELETE"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "privilegeType"


	public static class principalSpecification_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "principalSpecification"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1782:1: principalSpecification : principalName ( COMMA principalName )* -> ^( TOK_PRINCIPAL_NAME ( principalName )+ ) ;
	public final HiveParser.principalSpecification_return principalSpecification() throws RecognitionException {
		HiveParser.principalSpecification_return retval = new HiveParser.principalSpecification_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token COMMA613=null;
		ParserRuleReturnScope principalName612 =null;
		ParserRuleReturnScope principalName614 =null;

		ASTNode COMMA613_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleSubtreeStream stream_principalName=new RewriteRuleSubtreeStream(adaptor,"rule principalName");

		 pushMsg("user/group/role name list", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1785:5: ( principalName ( COMMA principalName )* -> ^( TOK_PRINCIPAL_NAME ( principalName )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1785:7: principalName ( COMMA principalName )*
			{
			pushFollow(FOLLOW_principalName_in_principalSpecification9932);
			principalName612=principalName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_principalName.add(principalName612.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1785:21: ( COMMA principalName )*
			loop185:
			while (true) {
				int alt185=2;
				int LA185_0 = input.LA(1);
				if ( (LA185_0==COMMA) ) {
					alt185=1;
				}

				switch (alt185) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1785:22: COMMA principalName
					{
					COMMA613=(Token)match(input,COMMA,FOLLOW_COMMA_in_principalSpecification9935); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA613);

					pushFollow(FOLLOW_principalName_in_principalSpecification9937);
					principalName614=principalName();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_principalName.add(principalName614.getTree());
					}
					break;

				default :
					break loop185;
				}
			}

			// AST REWRITE
			// elements: principalName
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1785:44: -> ^( TOK_PRINCIPAL_NAME ( principalName )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1785:47: ^( TOK_PRINCIPAL_NAME ( principalName )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_PRINCIPAL_NAME, "TOK_PRINCIPAL_NAME"), root_1);
				if ( !(stream_principalName.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_principalName.hasNext() ) {
					adaptor.addChild(root_1, stream_principalName.nextTree());
				}
				stream_principalName.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "principalSpecification"


	public static class principalName_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "principalName"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1788:1: principalName : ( KW_USER principalIdentifier -> ^( TOK_USER principalIdentifier ) | KW_GROUP principalIdentifier -> ^( TOK_GROUP principalIdentifier ) | KW_ROLE identifier -> ^( TOK_ROLE identifier ) );
	public final HiveParser.principalName_return principalName() throws RecognitionException {
		HiveParser.principalName_return retval = new HiveParser.principalName_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_USER615=null;
		Token KW_GROUP617=null;
		Token KW_ROLE619=null;
		ParserRuleReturnScope principalIdentifier616 =null;
		ParserRuleReturnScope principalIdentifier618 =null;
		ParserRuleReturnScope identifier620 =null;

		ASTNode KW_USER615_tree=null;
		ASTNode KW_GROUP617_tree=null;
		ASTNode KW_ROLE619_tree=null;
		RewriteRuleTokenStream stream_KW_ROLE=new RewriteRuleTokenStream(adaptor,"token KW_ROLE");
		RewriteRuleTokenStream stream_KW_USER=new RewriteRuleTokenStream(adaptor,"token KW_USER");
		RewriteRuleTokenStream stream_KW_GROUP=new RewriteRuleTokenStream(adaptor,"token KW_GROUP");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_principalIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule principalIdentifier");

		pushMsg("user|group|role name", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1791:5: ( KW_USER principalIdentifier -> ^( TOK_USER principalIdentifier ) | KW_GROUP principalIdentifier -> ^( TOK_GROUP principalIdentifier ) | KW_ROLE identifier -> ^( TOK_ROLE identifier ) )
			int alt186=3;
			switch ( input.LA(1) ) {
			case KW_USER:
				{
				alt186=1;
				}
				break;
			case KW_GROUP:
				{
				alt186=2;
				}
				break;
			case KW_ROLE:
				{
				alt186=3;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 186, 0, input);
				throw nvae;
			}
			switch (alt186) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1791:7: KW_USER principalIdentifier
					{
					KW_USER615=(Token)match(input,KW_USER,FOLLOW_KW_USER_in_principalName9975); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_USER.add(KW_USER615);

					pushFollow(FOLLOW_principalIdentifier_in_principalName9977);
					principalIdentifier616=principalIdentifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_principalIdentifier.add(principalIdentifier616.getTree());
					// AST REWRITE
					// elements: principalIdentifier
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1791:35: -> ^( TOK_USER principalIdentifier )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1791:38: ^( TOK_USER principalIdentifier )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_USER, "TOK_USER"), root_1);
						adaptor.addChild(root_1, stream_principalIdentifier.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1792:7: KW_GROUP principalIdentifier
					{
					KW_GROUP617=(Token)match(input,KW_GROUP,FOLLOW_KW_GROUP_in_principalName9993); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_GROUP.add(KW_GROUP617);

					pushFollow(FOLLOW_principalIdentifier_in_principalName9995);
					principalIdentifier618=principalIdentifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_principalIdentifier.add(principalIdentifier618.getTree());
					// AST REWRITE
					// elements: principalIdentifier
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1792:36: -> ^( TOK_GROUP principalIdentifier )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1792:39: ^( TOK_GROUP principalIdentifier )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_GROUP, "TOK_GROUP"), root_1);
						adaptor.addChild(root_1, stream_principalIdentifier.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1793:7: KW_ROLE identifier
					{
					KW_ROLE619=(Token)match(input,KW_ROLE,FOLLOW_KW_ROLE_in_principalName10011); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ROLE.add(KW_ROLE619);

					pushFollow(FOLLOW_identifier_in_principalName10013);
					identifier620=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(identifier620.getTree());
					// AST REWRITE
					// elements: identifier
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1793:26: -> ^( TOK_ROLE identifier )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1793:29: ^( TOK_ROLE identifier )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ROLE, "TOK_ROLE"), root_1);
						adaptor.addChild(root_1, stream_identifier.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "principalName"


	public static class withGrantOption_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "withGrantOption"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1796:1: withGrantOption : KW_WITH KW_GRANT KW_OPTION -> ^( TOK_GRANT_WITH_OPTION ) ;
	public final HiveParser.withGrantOption_return withGrantOption() throws RecognitionException {
		HiveParser.withGrantOption_return retval = new HiveParser.withGrantOption_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_WITH621=null;
		Token KW_GRANT622=null;
		Token KW_OPTION623=null;

		ASTNode KW_WITH621_tree=null;
		ASTNode KW_GRANT622_tree=null;
		ASTNode KW_OPTION623_tree=null;
		RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
		RewriteRuleTokenStream stream_KW_GRANT=new RewriteRuleTokenStream(adaptor,"token KW_GRANT");
		RewriteRuleTokenStream stream_KW_OPTION=new RewriteRuleTokenStream(adaptor,"token KW_OPTION");

		pushMsg("with grant option", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1799:5: ( KW_WITH KW_GRANT KW_OPTION -> ^( TOK_GRANT_WITH_OPTION ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1799:7: KW_WITH KW_GRANT KW_OPTION
			{
			KW_WITH621=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_withGrantOption10048); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_WITH.add(KW_WITH621);

			KW_GRANT622=(Token)match(input,KW_GRANT,FOLLOW_KW_GRANT_in_withGrantOption10050); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_GRANT.add(KW_GRANT622);

			KW_OPTION623=(Token)match(input,KW_OPTION,FOLLOW_KW_OPTION_in_withGrantOption10052); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_OPTION.add(KW_OPTION623);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1800:5: -> ^( TOK_GRANT_WITH_OPTION )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1800:8: ^( TOK_GRANT_WITH_OPTION )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_GRANT_WITH_OPTION, "TOK_GRANT_WITH_OPTION"), root_1);
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "withGrantOption"


	public static class grantOptionFor_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "grantOptionFor"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1803:1: grantOptionFor : KW_GRANT KW_OPTION KW_FOR -> ^( TOK_GRANT_OPTION_FOR ) ;
	public final HiveParser.grantOptionFor_return grantOptionFor() throws RecognitionException {
		HiveParser.grantOptionFor_return retval = new HiveParser.grantOptionFor_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_GRANT624=null;
		Token KW_OPTION625=null;
		Token KW_FOR626=null;

		ASTNode KW_GRANT624_tree=null;
		ASTNode KW_OPTION625_tree=null;
		ASTNode KW_FOR626_tree=null;
		RewriteRuleTokenStream stream_KW_FOR=new RewriteRuleTokenStream(adaptor,"token KW_FOR");
		RewriteRuleTokenStream stream_KW_GRANT=new RewriteRuleTokenStream(adaptor,"token KW_GRANT");
		RewriteRuleTokenStream stream_KW_OPTION=new RewriteRuleTokenStream(adaptor,"token KW_OPTION");

		pushMsg("grant option for", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1806:5: ( KW_GRANT KW_OPTION KW_FOR -> ^( TOK_GRANT_OPTION_FOR ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1806:7: KW_GRANT KW_OPTION KW_FOR
			{
			KW_GRANT624=(Token)match(input,KW_GRANT,FOLLOW_KW_GRANT_in_grantOptionFor10089); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_GRANT.add(KW_GRANT624);

			KW_OPTION625=(Token)match(input,KW_OPTION,FOLLOW_KW_OPTION_in_grantOptionFor10091); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_OPTION.add(KW_OPTION625);

			KW_FOR626=(Token)match(input,KW_FOR,FOLLOW_KW_FOR_in_grantOptionFor10093); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FOR.add(KW_FOR626);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1807:5: -> ^( TOK_GRANT_OPTION_FOR )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1807:8: ^( TOK_GRANT_OPTION_FOR )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_GRANT_OPTION_FOR, "TOK_GRANT_OPTION_FOR"), root_1);
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "grantOptionFor"


	public static class adminOptionFor_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "adminOptionFor"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1810:1: adminOptionFor : KW_ADMIN KW_OPTION KW_FOR -> ^( TOK_ADMIN_OPTION_FOR ) ;
	public final HiveParser.adminOptionFor_return adminOptionFor() throws RecognitionException {
		HiveParser.adminOptionFor_return retval = new HiveParser.adminOptionFor_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ADMIN627=null;
		Token KW_OPTION628=null;
		Token KW_FOR629=null;

		ASTNode KW_ADMIN627_tree=null;
		ASTNode KW_OPTION628_tree=null;
		ASTNode KW_FOR629_tree=null;
		RewriteRuleTokenStream stream_KW_FOR=new RewriteRuleTokenStream(adaptor,"token KW_FOR");
		RewriteRuleTokenStream stream_KW_OPTION=new RewriteRuleTokenStream(adaptor,"token KW_OPTION");
		RewriteRuleTokenStream stream_KW_ADMIN=new RewriteRuleTokenStream(adaptor,"token KW_ADMIN");

		pushMsg("admin option for", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1813:5: ( KW_ADMIN KW_OPTION KW_FOR -> ^( TOK_ADMIN_OPTION_FOR ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1813:7: KW_ADMIN KW_OPTION KW_FOR
			{
			KW_ADMIN627=(Token)match(input,KW_ADMIN,FOLLOW_KW_ADMIN_in_adminOptionFor10126); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ADMIN.add(KW_ADMIN627);

			KW_OPTION628=(Token)match(input,KW_OPTION,FOLLOW_KW_OPTION_in_adminOptionFor10128); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_OPTION.add(KW_OPTION628);

			KW_FOR629=(Token)match(input,KW_FOR,FOLLOW_KW_FOR_in_adminOptionFor10130); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FOR.add(KW_FOR629);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1814:5: -> ^( TOK_ADMIN_OPTION_FOR )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1814:8: ^( TOK_ADMIN_OPTION_FOR )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ADMIN_OPTION_FOR, "TOK_ADMIN_OPTION_FOR"), root_1);
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "adminOptionFor"


	public static class withAdminOption_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "withAdminOption"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1817:1: withAdminOption : KW_WITH KW_ADMIN KW_OPTION -> ^( TOK_GRANT_WITH_ADMIN_OPTION ) ;
	public final HiveParser.withAdminOption_return withAdminOption() throws RecognitionException {
		HiveParser.withAdminOption_return retval = new HiveParser.withAdminOption_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_WITH630=null;
		Token KW_ADMIN631=null;
		Token KW_OPTION632=null;

		ASTNode KW_WITH630_tree=null;
		ASTNode KW_ADMIN631_tree=null;
		ASTNode KW_OPTION632_tree=null;
		RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
		RewriteRuleTokenStream stream_KW_OPTION=new RewriteRuleTokenStream(adaptor,"token KW_OPTION");
		RewriteRuleTokenStream stream_KW_ADMIN=new RewriteRuleTokenStream(adaptor,"token KW_ADMIN");

		pushMsg("with admin option", state);
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1820:5: ( KW_WITH KW_ADMIN KW_OPTION -> ^( TOK_GRANT_WITH_ADMIN_OPTION ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1820:7: KW_WITH KW_ADMIN KW_OPTION
			{
			KW_WITH630=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_withAdminOption10163); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_WITH.add(KW_WITH630);

			KW_ADMIN631=(Token)match(input,KW_ADMIN,FOLLOW_KW_ADMIN_in_withAdminOption10165); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ADMIN.add(KW_ADMIN631);

			KW_OPTION632=(Token)match(input,KW_OPTION,FOLLOW_KW_OPTION_in_withAdminOption10167); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_OPTION.add(KW_OPTION632);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1821:5: -> ^( TOK_GRANT_WITH_ADMIN_OPTION )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1821:8: ^( TOK_GRANT_WITH_ADMIN_OPTION )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_GRANT_WITH_ADMIN_OPTION, "TOK_GRANT_WITH_ADMIN_OPTION"), root_1);
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) {popMsg(state);}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "withAdminOption"


	public static class metastoreCheck_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "metastoreCheck"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1824:1: metastoreCheck : KW_MSCK (repair= KW_REPAIR )? ( KW_TABLE tableName ( (add= KW_ADD |drop= KW_DROP |sync= KW_SYNC ) (parts= KW_PARTITIONS ) )? | ( partitionSpec )? ) -> ^( TOK_MSCK ( $repair)? ( tableName )? ( $add)? ( $drop)? ( $sync)? ( ( partitionSpec )* )? ) ;
	public final HiveParser.metastoreCheck_return metastoreCheck() throws RecognitionException {
		HiveParser.metastoreCheck_return retval = new HiveParser.metastoreCheck_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token repair=null;
		Token add=null;
		Token drop=null;
		Token sync=null;
		Token parts=null;
		Token KW_MSCK633=null;
		Token KW_TABLE634=null;
		ParserRuleReturnScope tableName635 =null;
		ParserRuleReturnScope partitionSpec636 =null;

		ASTNode repair_tree=null;
		ASTNode add_tree=null;
		ASTNode drop_tree=null;
		ASTNode sync_tree=null;
		ASTNode parts_tree=null;
		ASTNode KW_MSCK633_tree=null;
		ASTNode KW_TABLE634_tree=null;
		RewriteRuleTokenStream stream_KW_REPAIR=new RewriteRuleTokenStream(adaptor,"token KW_REPAIR");
		RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
		RewriteRuleTokenStream stream_KW_SYNC=new RewriteRuleTokenStream(adaptor,"token KW_SYNC");
		RewriteRuleTokenStream stream_KW_MSCK=new RewriteRuleTokenStream(adaptor,"token KW_MSCK");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleTokenStream stream_KW_ADD=new RewriteRuleTokenStream(adaptor,"token KW_ADD");
		RewriteRuleTokenStream stream_KW_PARTITIONS=new RewriteRuleTokenStream(adaptor,"token KW_PARTITIONS");
		RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		 pushMsg("metastore check statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1827:5: ( KW_MSCK (repair= KW_REPAIR )? ( KW_TABLE tableName ( (add= KW_ADD |drop= KW_DROP |sync= KW_SYNC ) (parts= KW_PARTITIONS ) )? | ( partitionSpec )? ) -> ^( TOK_MSCK ( $repair)? ( tableName )? ( $add)? ( $drop)? ( $sync)? ( ( partitionSpec )* )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1827:7: KW_MSCK (repair= KW_REPAIR )? ( KW_TABLE tableName ( (add= KW_ADD |drop= KW_DROP |sync= KW_SYNC ) (parts= KW_PARTITIONS ) )? | ( partitionSpec )? )
			{
			KW_MSCK633=(Token)match(input,KW_MSCK,FOLLOW_KW_MSCK_in_metastoreCheck10204); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_MSCK.add(KW_MSCK633);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1827:15: (repair= KW_REPAIR )?
			int alt187=2;
			int LA187_0 = input.LA(1);
			if ( (LA187_0==KW_REPAIR) ) {
				alt187=1;
			}
			switch (alt187) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1827:16: repair= KW_REPAIR
					{
					repair=(Token)match(input,KW_REPAIR,FOLLOW_KW_REPAIR_in_metastoreCheck10209); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_REPAIR.add(repair);

					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1828:7: ( KW_TABLE tableName ( (add= KW_ADD |drop= KW_DROP |sync= KW_SYNC ) (parts= KW_PARTITIONS ) )? | ( partitionSpec )? )
			int alt191=2;
			int LA191_0 = input.LA(1);
			if ( (LA191_0==KW_TABLE) ) {
				alt191=1;
			}
			else if ( (LA191_0==EOF||LA191_0==KW_PARTITION) ) {
				alt191=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 191, 0, input);
				throw nvae;
			}

			switch (alt191) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1828:8: KW_TABLE tableName ( (add= KW_ADD |drop= KW_DROP |sync= KW_SYNC ) (parts= KW_PARTITIONS ) )?
					{
					KW_TABLE634=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_metastoreCheck10220); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE634);

					pushFollow(FOLLOW_tableName_in_metastoreCheck10222);
					tableName635=tableName();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableName.add(tableName635.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1829:9: ( (add= KW_ADD |drop= KW_DROP |sync= KW_SYNC ) (parts= KW_PARTITIONS ) )?
					int alt189=2;
					int LA189_0 = input.LA(1);
					if ( (LA189_0==KW_ADD||LA189_0==KW_DROP||LA189_0==KW_SYNC) ) {
						alt189=1;
					}
					switch (alt189) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1829:10: (add= KW_ADD |drop= KW_DROP |sync= KW_SYNC ) (parts= KW_PARTITIONS )
							{
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1829:10: (add= KW_ADD |drop= KW_DROP |sync= KW_SYNC )
							int alt188=3;
							switch ( input.LA(1) ) {
							case KW_ADD:
								{
								alt188=1;
								}
								break;
							case KW_DROP:
								{
								alt188=2;
								}
								break;
							case KW_SYNC:
								{
								alt188=3;
								}
								break;
							default:
								if (state.backtracking>0) {state.failed=true; return retval;}
								NoViableAltException nvae =
									new NoViableAltException("", 188, 0, input);
								throw nvae;
							}
							switch (alt188) {
								case 1 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1829:11: add= KW_ADD
									{
									add=(Token)match(input,KW_ADD,FOLLOW_KW_ADD_in_metastoreCheck10236); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_ADD.add(add);

									}
									break;
								case 2 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1829:24: drop= KW_DROP
									{
									drop=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_metastoreCheck10242); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_DROP.add(drop);

									}
									break;
								case 3 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:1829:39: sync= KW_SYNC
									{
									sync=(Token)match(input,KW_SYNC,FOLLOW_KW_SYNC_in_metastoreCheck10248); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_KW_SYNC.add(sync);

									}
									break;

							}

							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1829:53: (parts= KW_PARTITIONS )
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1829:54: parts= KW_PARTITIONS
							{
							parts=(Token)match(input,KW_PARTITIONS,FOLLOW_KW_PARTITIONS_in_metastoreCheck10254); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_PARTITIONS.add(parts);

							}

							}
							break;

					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1830:9: ( partitionSpec )?
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1830:9: ( partitionSpec )?
					int alt190=2;
					int LA190_0 = input.LA(1);
					if ( (LA190_0==KW_PARTITION) ) {
						alt190=1;
					}
					switch (alt190) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:1830:10: partitionSpec
							{
							pushFollow(FOLLOW_partitionSpec_in_metastoreCheck10270);
							partitionSpec636=partitionSpec();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_partitionSpec.add(partitionSpec636.getTree());
							}
							break;

					}

					}
					break;

			}

			// AST REWRITE
			// elements: add, partitionSpec, drop, tableName, repair, sync
			// token labels: add, drop, repair, sync
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_add=new RewriteRuleTokenStream(adaptor,"token add",add);
			RewriteRuleTokenStream stream_drop=new RewriteRuleTokenStream(adaptor,"token drop",drop);
			RewriteRuleTokenStream stream_repair=new RewriteRuleTokenStream(adaptor,"token repair",repair);
			RewriteRuleTokenStream stream_sync=new RewriteRuleTokenStream(adaptor,"token sync",sync);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1831:5: -> ^( TOK_MSCK ( $repair)? ( tableName )? ( $add)? ( $drop)? ( $sync)? ( ( partitionSpec )* )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1831:8: ^( TOK_MSCK ( $repair)? ( tableName )? ( $add)? ( $drop)? ( $sync)? ( ( partitionSpec )* )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_MSCK, "TOK_MSCK"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1831:20: ( $repair)?
				if ( stream_repair.hasNext() ) {
					adaptor.addChild(root_1, stream_repair.nextNode());
				}
				stream_repair.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1831:28: ( tableName )?
				if ( stream_tableName.hasNext() ) {
					adaptor.addChild(root_1, stream_tableName.nextTree());
				}
				stream_tableName.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1831:40: ( $add)?
				if ( stream_add.hasNext() ) {
					adaptor.addChild(root_1, stream_add.nextNode());
				}
				stream_add.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1831:46: ( $drop)?
				if ( stream_drop.hasNext() ) {
					adaptor.addChild(root_1, stream_drop.nextNode());
				}
				stream_drop.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1831:53: ( $sync)?
				if ( stream_sync.hasNext() ) {
					adaptor.addChild(root_1, stream_sync.nextNode());
				}
				stream_sync.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1831:59: ( ( partitionSpec )* )?
				if ( stream_partitionSpec.hasNext() ) {
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1831:60: ( partitionSpec )*
					while ( stream_partitionSpec.hasNext() ) {
						adaptor.addChild(root_1, stream_partitionSpec.nextTree());
					}
					stream_partitionSpec.reset();

				}
				stream_partitionSpec.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "metastoreCheck"


	public static class resourceList_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "resourceList"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1834:1: resourceList : resource ( COMMA resource )* -> ^( TOK_RESOURCE_LIST ( resource )+ ) ;
	public final HiveParser.resourceList_return resourceList() throws RecognitionException {
		HiveParser.resourceList_return retval = new HiveParser.resourceList_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token COMMA638=null;
		ParserRuleReturnScope resource637 =null;
		ParserRuleReturnScope resource639 =null;

		ASTNode COMMA638_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleSubtreeStream stream_resource=new RewriteRuleSubtreeStream(adaptor,"rule resource");

		 pushMsg("resource list", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1837:3: ( resource ( COMMA resource )* -> ^( TOK_RESOURCE_LIST ( resource )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1838:3: resource ( COMMA resource )*
			{
			pushFollow(FOLLOW_resource_in_resourceList10335);
			resource637=resource();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_resource.add(resource637.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1838:12: ( COMMA resource )*
			loop192:
			while (true) {
				int alt192=2;
				int LA192_0 = input.LA(1);
				if ( (LA192_0==COMMA) ) {
					alt192=1;
				}

				switch (alt192) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1838:13: COMMA resource
					{
					COMMA638=(Token)match(input,COMMA,FOLLOW_COMMA_in_resourceList10338); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA638);

					pushFollow(FOLLOW_resource_in_resourceList10340);
					resource639=resource();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_resource.add(resource639.getTree());
					}
					break;

				default :
					break loop192;
				}
			}

			// AST REWRITE
			// elements: resource
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1838:30: -> ^( TOK_RESOURCE_LIST ( resource )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1838:33: ^( TOK_RESOURCE_LIST ( resource )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_RESOURCE_LIST, "TOK_RESOURCE_LIST"), root_1);
				if ( !(stream_resource.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_resource.hasNext() ) {
					adaptor.addChild(root_1, stream_resource.nextTree());
				}
				stream_resource.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "resourceList"


	public static class resource_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "resource"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1841:1: resource : resType= resourceType resPath= StringLiteral -> ^( TOK_RESOURCE_URI $resType $resPath) ;
	public final HiveParser.resource_return resource() throws RecognitionException {
		HiveParser.resource_return retval = new HiveParser.resource_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token resPath=null;
		ParserRuleReturnScope resType =null;

		ASTNode resPath_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleSubtreeStream stream_resourceType=new RewriteRuleSubtreeStream(adaptor,"rule resourceType");

		 pushMsg("resource", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1844:3: (resType= resourceType resPath= StringLiteral -> ^( TOK_RESOURCE_URI $resType $resPath) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1845:3: resType= resourceType resPath= StringLiteral
			{
			pushFollow(FOLLOW_resourceType_in_resource10378);
			resType=resourceType();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_resourceType.add(resType.getTree());
			resPath=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_resource10382); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(resPath);

			// AST REWRITE
			// elements: resType, resPath
			// token labels: resPath
			// rule labels: resType, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_resPath=new RewriteRuleTokenStream(adaptor,"token resPath",resPath);
			RewriteRuleSubtreeStream stream_resType=new RewriteRuleSubtreeStream(adaptor,"rule resType",resType!=null?resType.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1845:46: -> ^( TOK_RESOURCE_URI $resType $resPath)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1845:49: ^( TOK_RESOURCE_URI $resType $resPath)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_RESOURCE_URI, "TOK_RESOURCE_URI"), root_1);
				adaptor.addChild(root_1, stream_resType.nextTree());
				adaptor.addChild(root_1, stream_resPath.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "resource"


	public static class resourceType_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "resourceType"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1848:1: resourceType : ( KW_JAR -> ^( TOK_JAR ) | KW_FILE -> ^( TOK_FILE ) | KW_ARCHIVE -> ^( TOK_ARCHIVE ) );
	public final HiveParser.resourceType_return resourceType() throws RecognitionException {
		HiveParser.resourceType_return retval = new HiveParser.resourceType_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_JAR640=null;
		Token KW_FILE641=null;
		Token KW_ARCHIVE642=null;

		ASTNode KW_JAR640_tree=null;
		ASTNode KW_FILE641_tree=null;
		ASTNode KW_ARCHIVE642_tree=null;
		RewriteRuleTokenStream stream_KW_ARCHIVE=new RewriteRuleTokenStream(adaptor,"token KW_ARCHIVE");
		RewriteRuleTokenStream stream_KW_JAR=new RewriteRuleTokenStream(adaptor,"token KW_JAR");
		RewriteRuleTokenStream stream_KW_FILE=new RewriteRuleTokenStream(adaptor,"token KW_FILE");

		 pushMsg("resource type", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1851:3: ( KW_JAR -> ^( TOK_JAR ) | KW_FILE -> ^( TOK_FILE ) | KW_ARCHIVE -> ^( TOK_ARCHIVE ) )
			int alt193=3;
			switch ( input.LA(1) ) {
			case KW_JAR:
				{
				alt193=1;
				}
				break;
			case KW_FILE:
				{
				alt193=2;
				}
				break;
			case KW_ARCHIVE:
				{
				alt193=3;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 193, 0, input);
				throw nvae;
			}
			switch (alt193) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1852:3: KW_JAR
					{
					KW_JAR640=(Token)match(input,KW_JAR,FOLLOW_KW_JAR_in_resourceType10419); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_JAR.add(KW_JAR640);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1852:10: -> ^( TOK_JAR )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1852:13: ^( TOK_JAR )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_JAR, "TOK_JAR"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1854:3: KW_FILE
					{
					KW_FILE641=(Token)match(input,KW_FILE,FOLLOW_KW_FILE_in_resourceType10433); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_FILE.add(KW_FILE641);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1854:11: -> ^( TOK_FILE )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1854:14: ^( TOK_FILE )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FILE, "TOK_FILE"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1856:3: KW_ARCHIVE
					{
					KW_ARCHIVE642=(Token)match(input,KW_ARCHIVE,FOLLOW_KW_ARCHIVE_in_resourceType10447); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ARCHIVE.add(KW_ARCHIVE642);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 1856:14: -> ^( TOK_ARCHIVE )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:1856:17: ^( TOK_ARCHIVE )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ARCHIVE, "TOK_ARCHIVE"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "resourceType"


	public static class createFunctionStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};
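
	// Illustrative note (not generated by ANTLR): createFunctionStatement, below,
	// parses e.g.
	//   CREATE TEMPORARY FUNCTION my_upper AS 'com.example.MyUpper'
	//   USING JAR 'hdfs:///udfs/my-udfs.jar';
	// The optional TEMPORARY keyword selects the rewrite branch that appends a
	// TOK_TEMPORARY child to the TOK_CREATEFUNCTION node.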


	// $ANTLR start "createFunctionStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1859:1: createFunctionStatement : KW_CREATE (temp= KW_TEMPORARY )? KW_FUNCTION functionIdentifier KW_AS StringLiteral ( KW_USING rList= resourceList )? -> {$temp != null}? ^( TOK_CREATEFUNCTION functionIdentifier StringLiteral ( $rList)? TOK_TEMPORARY ) -> ^( TOK_CREATEFUNCTION functionIdentifier StringLiteral ( $rList)? ) ;
	public final HiveParser.createFunctionStatement_return createFunctionStatement() throws RecognitionException {
		HiveParser.createFunctionStatement_return retval = new HiveParser.createFunctionStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token temp=null;
		Token KW_CREATE643=null;
		Token KW_FUNCTION644=null;
		Token KW_AS646=null;
		Token StringLiteral647=null;
		Token KW_USING648=null;
		ParserRuleReturnScope rList =null;
		ParserRuleReturnScope functionIdentifier645 =null;

		ASTNode temp_tree=null;
		ASTNode KW_CREATE643_tree=null;
		ASTNode KW_FUNCTION644_tree=null;
		ASTNode KW_AS646_tree=null;
		ASTNode StringLiteral647_tree=null;
		ASTNode KW_USING648_tree=null;
		RewriteRuleTokenStream stream_KW_TEMPORARY=new RewriteRuleTokenStream(adaptor,"token KW_TEMPORARY");
		RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_USING=new RewriteRuleTokenStream(adaptor,"token KW_USING");
		RewriteRuleTokenStream stream_KW_FUNCTION=new RewriteRuleTokenStream(adaptor,"token KW_FUNCTION");
		RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
		RewriteRuleSubtreeStream stream_functionIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule functionIdentifier");
		RewriteRuleSubtreeStream stream_resourceList=new RewriteRuleSubtreeStream(adaptor,"rule resourceList");

		 pushMsg("create function statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1862:5: ( KW_CREATE (temp= KW_TEMPORARY )? KW_FUNCTION functionIdentifier KW_AS StringLiteral ( KW_USING rList= resourceList )? -> {$temp != null}? ^( TOK_CREATEFUNCTION functionIdentifier StringLiteral ( $rList)? TOK_TEMPORARY ) -> ^( TOK_CREATEFUNCTION functionIdentifier StringLiteral ( $rList)? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1862:7: KW_CREATE (temp= KW_TEMPORARY )? KW_FUNCTION functionIdentifier KW_AS StringLiteral ( KW_USING rList= resourceList )?
			{
			KW_CREATE643=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_createFunctionStatement10478); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_CREATE.add(KW_CREATE643);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1862:17: (temp= KW_TEMPORARY )?
			int alt194=2;
			int LA194_0 = input.LA(1);
			if ( (LA194_0==KW_TEMPORARY) ) {
				alt194=1;
			}
			switch (alt194) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1862:18: temp= KW_TEMPORARY
					{
					temp=(Token)match(input,KW_TEMPORARY,FOLLOW_KW_TEMPORARY_in_createFunctionStatement10483); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TEMPORARY.add(temp);

					}
					break;

			}

			KW_FUNCTION644=(Token)match(input,KW_FUNCTION,FOLLOW_KW_FUNCTION_in_createFunctionStatement10487); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FUNCTION.add(KW_FUNCTION644);

			pushFollow(FOLLOW_functionIdentifier_in_createFunctionStatement10489);
			functionIdentifier645=functionIdentifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_functionIdentifier.add(functionIdentifier645.getTree());
			KW_AS646=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_createFunctionStatement10491); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_AS.add(KW_AS646);

			StringLiteral647=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_createFunctionStatement10493); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(StringLiteral647);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1863:7: ( KW_USING rList= resourceList )?
			int alt195=2;
			int LA195_0 = input.LA(1);
			if ( (LA195_0==KW_USING) ) {
				alt195=1;
			}
			switch (alt195) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1863:8: KW_USING rList= resourceList
					{
					KW_USING648=(Token)match(input,KW_USING,FOLLOW_KW_USING_in_createFunctionStatement10502); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_USING.add(KW_USING648);

					pushFollow(FOLLOW_resourceList_in_createFunctionStatement10506);
					rList=resourceList();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_resourceList.add(rList.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: StringLiteral, StringLiteral, rList, rList, functionIdentifier, functionIdentifier
			// token labels: 
			// rule labels: rList, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_rList=new RewriteRuleSubtreeStream(adaptor,"rule rList",rList!=null?rList.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1864:5: -> {$temp != null}? ^( TOK_CREATEFUNCTION functionIdentifier StringLiteral ( $rList)? TOK_TEMPORARY )
			if (temp != null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1864:25: ^( TOK_CREATEFUNCTION functionIdentifier StringLiteral ( $rList)? TOK_TEMPORARY )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CREATEFUNCTION, "TOK_CREATEFUNCTION"), root_1);
				adaptor.addChild(root_1, stream_functionIdentifier.nextTree());
				adaptor.addChild(root_1, stream_StringLiteral.nextNode());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1864:80: ( $rList)?
				if ( stream_rList.hasNext() ) {
					adaptor.addChild(root_1, stream_rList.nextTree());
				}
				stream_rList.reset();

				adaptor.addChild(root_1, (ASTNode)adaptor.create(TOK_TEMPORARY, "TOK_TEMPORARY"));
				adaptor.addChild(root_0, root_1);
				}

			}

			else // 1865:5: -> ^( TOK_CREATEFUNCTION functionIdentifier StringLiteral ( $rList)? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1865:25: ^( TOK_CREATEFUNCTION functionIdentifier StringLiteral ( $rList)? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CREATEFUNCTION, "TOK_CREATEFUNCTION"), root_1);
				adaptor.addChild(root_1, stream_functionIdentifier.nextTree());
				adaptor.addChild(root_1, stream_StringLiteral.nextNode());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1865:80: ( $rList)?
				if ( stream_rList.hasNext() ) {
					adaptor.addChild(root_1, stream_rList.nextTree());
				}
				stream_rList.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "createFunctionStatement"


	public static class dropFunctionStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "dropFunctionStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1868:1: dropFunctionStatement : KW_DROP (temp= KW_TEMPORARY )? KW_FUNCTION ( ifExists )? functionIdentifier -> {$temp != null}? ^( TOK_DROPFUNCTION functionIdentifier ( ifExists )? TOK_TEMPORARY ) -> ^( TOK_DROPFUNCTION functionIdentifier ( ifExists )? ) ;
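	// Illustrative examples (placeholders, not from the grammar file): this rule
	// matches HiveQL such as
	//   DROP FUNCTION IF EXISTS my_udf;
	//   DROP TEMPORARY FUNCTION my_udf;
	// Per the rewrite above, the tree is ^(TOK_DROPFUNCTION functionIdentifier
	// (ifExists)?), with TOK_TEMPORARY appended when TEMPORARY was present.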
	public final HiveParser.dropFunctionStatement_return dropFunctionStatement() throws RecognitionException {
		HiveParser.dropFunctionStatement_return retval = new HiveParser.dropFunctionStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token temp=null;
		Token KW_DROP649=null;
		Token KW_FUNCTION650=null;
		ParserRuleReturnScope ifExists651 =null;
		ParserRuleReturnScope functionIdentifier652 =null;

		ASTNode temp_tree=null;
		ASTNode KW_DROP649_tree=null;
		ASTNode KW_FUNCTION650_tree=null;
		RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
		RewriteRuleTokenStream stream_KW_TEMPORARY=new RewriteRuleTokenStream(adaptor,"token KW_TEMPORARY");
		RewriteRuleTokenStream stream_KW_FUNCTION=new RewriteRuleTokenStream(adaptor,"token KW_FUNCTION");
		RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");
		RewriteRuleSubtreeStream stream_functionIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule functionIdentifier");

		 pushMsg("drop function statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1871:5: ( KW_DROP (temp= KW_TEMPORARY )? KW_FUNCTION ( ifExists )? functionIdentifier -> {$temp != null}? ^( TOK_DROPFUNCTION functionIdentifier ( ifExists )? TOK_TEMPORARY ) -> ^( TOK_DROPFUNCTION functionIdentifier ( ifExists )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1871:7: KW_DROP (temp= KW_TEMPORARY )? KW_FUNCTION ( ifExists )? functionIdentifier
			{
			KW_DROP649=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_dropFunctionStatement10592); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DROP.add(KW_DROP649);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1871:15: (temp= KW_TEMPORARY )?
			int alt196=2;
			int LA196_0 = input.LA(1);
			if ( (LA196_0==KW_TEMPORARY) ) {
				alt196=1;
			}
			switch (alt196) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1871:16: temp= KW_TEMPORARY
					{
					temp=(Token)match(input,KW_TEMPORARY,FOLLOW_KW_TEMPORARY_in_dropFunctionStatement10597); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TEMPORARY.add(temp);

					}
					break;

			}

			KW_FUNCTION650=(Token)match(input,KW_FUNCTION,FOLLOW_KW_FUNCTION_in_dropFunctionStatement10601); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FUNCTION.add(KW_FUNCTION650);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1871:48: ( ifExists )?
			int alt197=2;
			int LA197_0 = input.LA(1);
			if ( (LA197_0==KW_IF) ) {
				alt197=1;
			}
			switch (alt197) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1871:48: ifExists
					{
					pushFollow(FOLLOW_ifExists_in_dropFunctionStatement10603);
					ifExists651=ifExists();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_ifExists.add(ifExists651.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_functionIdentifier_in_dropFunctionStatement10606);
			functionIdentifier652=functionIdentifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_functionIdentifier.add(functionIdentifier652.getTree());
			// AST REWRITE
			// elements: ifExists, functionIdentifier, functionIdentifier, ifExists
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1872:5: -> {$temp != null}? ^( TOK_DROPFUNCTION functionIdentifier ( ifExists )? TOK_TEMPORARY )
			if (temp != null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1872:25: ^( TOK_DROPFUNCTION functionIdentifier ( ifExists )? TOK_TEMPORARY )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DROPFUNCTION, "TOK_DROPFUNCTION"), root_1);
				adaptor.addChild(root_1, stream_functionIdentifier.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1872:63: ( ifExists )?
				if ( stream_ifExists.hasNext() ) {
					adaptor.addChild(root_1, stream_ifExists.nextTree());
				}
				stream_ifExists.reset();

				adaptor.addChild(root_1, (ASTNode)adaptor.create(TOK_TEMPORARY, "TOK_TEMPORARY"));
				adaptor.addChild(root_0, root_1);
				}

			}

			else // 1873:5: -> ^( TOK_DROPFUNCTION functionIdentifier ( ifExists )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1873:25: ^( TOK_DROPFUNCTION functionIdentifier ( ifExists )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DROPFUNCTION, "TOK_DROPFUNCTION"), root_1);
				adaptor.addChild(root_1, stream_functionIdentifier.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1873:63: ( ifExists )?
				if ( stream_ifExists.hasNext() ) {
					adaptor.addChild(root_1, stream_ifExists.nextTree());
				}
				stream_ifExists.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "dropFunctionStatement"


	public static class reloadFunctionStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "reloadFunctionStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1876:1: reloadFunctionStatement : KW_RELOAD KW_FUNCTION -> ^( TOK_RELOADFUNCTION ) ;
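	// Illustrative example: this rule matches exactly
	//   RELOAD FUNCTION;
	// and rewrites it to a childless ^(TOK_RELOADFUNCTION) node.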
	public final HiveParser.reloadFunctionStatement_return reloadFunctionStatement() throws RecognitionException {
		HiveParser.reloadFunctionStatement_return retval = new HiveParser.reloadFunctionStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_RELOAD653=null;
		Token KW_FUNCTION654=null;

		ASTNode KW_RELOAD653_tree=null;
		ASTNode KW_FUNCTION654_tree=null;
		RewriteRuleTokenStream stream_KW_FUNCTION=new RewriteRuleTokenStream(adaptor,"token KW_FUNCTION");
		RewriteRuleTokenStream stream_KW_RELOAD=new RewriteRuleTokenStream(adaptor,"token KW_RELOAD");

		 pushMsg("reload function statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1879:5: ( KW_RELOAD KW_FUNCTION -> ^( TOK_RELOADFUNCTION ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1879:7: KW_RELOAD KW_FUNCTION
			{
			KW_RELOAD653=(Token)match(input,KW_RELOAD,FOLLOW_KW_RELOAD_in_reloadFunctionStatement10684); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_RELOAD.add(KW_RELOAD653);

			KW_FUNCTION654=(Token)match(input,KW_FUNCTION,FOLLOW_KW_FUNCTION_in_reloadFunctionStatement10686); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FUNCTION.add(KW_FUNCTION654);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1879:29: -> ^( TOK_RELOADFUNCTION )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1879:32: ^( TOK_RELOADFUNCTION )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_RELOADFUNCTION, "TOK_RELOADFUNCTION"), root_1);
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "reloadFunctionStatement"


	public static class createMacroStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "createMacroStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1881:1: createMacroStatement : KW_CREATE KW_TEMPORARY KW_MACRO Identifier LPAREN ( columnNameTypeList )? RPAREN expression -> ^( TOK_CREATEMACRO Identifier ( columnNameTypeList )? expression ) ;
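	// Illustrative examples (macro names and bodies are placeholders): this rule
	// matches HiveQL such as
	//   CREATE TEMPORARY MACRO sigmoid (x DOUBLE) 1.0 / (1.0 + EXP(-x));
	//   CREATE TEMPORARY MACRO pi () 3.14159;
	// The parenthesized column list may be empty, as in the second form; the
	// rewrite yields ^(TOK_CREATEMACRO Identifier (columnNameTypeList)? expression).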
	public final HiveParser.createMacroStatement_return createMacroStatement() throws RecognitionException {
		HiveParser.createMacroStatement_return retval = new HiveParser.createMacroStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_CREATE655=null;
		Token KW_TEMPORARY656=null;
		Token KW_MACRO657=null;
		Token Identifier658=null;
		Token LPAREN659=null;
		Token RPAREN661=null;
		ParserRuleReturnScope columnNameTypeList660 =null;
		ParserRuleReturnScope expression662 =null;

		ASTNode KW_CREATE655_tree=null;
		ASTNode KW_TEMPORARY656_tree=null;
		ASTNode KW_MACRO657_tree=null;
		ASTNode Identifier658_tree=null;
		ASTNode LPAREN659_tree=null;
		ASTNode RPAREN661_tree=null;
		RewriteRuleTokenStream stream_KW_TEMPORARY=new RewriteRuleTokenStream(adaptor,"token KW_TEMPORARY");
		RewriteRuleTokenStream stream_Identifier=new RewriteRuleTokenStream(adaptor,"token Identifier");
		RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
		RewriteRuleTokenStream stream_KW_MACRO=new RewriteRuleTokenStream(adaptor,"token KW_MACRO");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleSubtreeStream stream_expression=new RewriteRuleSubtreeStream(adaptor,"rule expression");
		RewriteRuleSubtreeStream stream_columnNameTypeList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameTypeList");

		 pushMsg("create macro statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1884:5: ( KW_CREATE KW_TEMPORARY KW_MACRO Identifier LPAREN ( columnNameTypeList )? RPAREN expression -> ^( TOK_CREATEMACRO Identifier ( columnNameTypeList )? expression ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1884:7: KW_CREATE KW_TEMPORARY KW_MACRO Identifier LPAREN ( columnNameTypeList )? RPAREN expression
			{
			KW_CREATE655=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_createMacroStatement10714); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_CREATE.add(KW_CREATE655);

			KW_TEMPORARY656=(Token)match(input,KW_TEMPORARY,FOLLOW_KW_TEMPORARY_in_createMacroStatement10716); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TEMPORARY.add(KW_TEMPORARY656);

			KW_MACRO657=(Token)match(input,KW_MACRO,FOLLOW_KW_MACRO_in_createMacroStatement10718); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_MACRO.add(KW_MACRO657);

			Identifier658=(Token)match(input,Identifier,FOLLOW_Identifier_in_createMacroStatement10720); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_Identifier.add(Identifier658);

			LPAREN659=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_createMacroStatement10728); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN659);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1885:14: ( columnNameTypeList )?
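			// The lookahead predicate below enumerates every token that may begin
			// columnNameTypeList: Identifier plus Hive's non-reserved keywords,
			// all of which are usable as column names.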
			int alt198=2;
			int LA198_0 = input.LA(1);
			if ( (LA198_0==Identifier||(LA198_0 >= KW_ABORT && LA198_0 <= KW_AFTER)||LA198_0==KW_ALLOC_FRACTION||LA198_0==KW_ANALYZE||LA198_0==KW_ARCHIVE||LA198_0==KW_ASC||(LA198_0 >= KW_AUTOCOMMIT && LA198_0 <= KW_BEFORE)||(LA198_0 >= KW_BUCKET && LA198_0 <= KW_BUCKETS)||(LA198_0 >= KW_CACHE && LA198_0 <= KW_CASCADE)||LA198_0==KW_CHANGE||(LA198_0 >= KW_CHECK && LA198_0 <= KW_COLLECTION)||(LA198_0 >= KW_COLUMNS && LA198_0 <= KW_COMMENT)||(LA198_0 >= KW_COMPACT && LA198_0 <= KW_CONCATENATE)||LA198_0==KW_CONTINUE||LA198_0==KW_DATA||LA198_0==KW_DATABASES||(LA198_0 >= KW_DATETIME && LA198_0 <= KW_DBPROPERTIES)||(LA198_0 >= KW_DEFAULT && LA198_0 <= KW_DEFINED)||(LA198_0 >= KW_DELIMITED && LA198_0 <= KW_DESC)||(LA198_0 >= KW_DETAIL && LA198_0 <= KW_DISABLE)||(LA198_0 >= KW_DISTRIBUTE && LA198_0 <= KW_DO)||LA198_0==KW_DOW||(LA198_0 >= KW_DUMP && LA198_0 <= KW_ELEM_TYPE)||LA198_0==KW_ENABLE||(LA198_0 >= KW_ENFORCED && LA198_0 <= KW_ESCAPED)||LA198_0==KW_EXCLUSIVE||(LA198_0 >= KW_EXPLAIN && LA198_0 <= KW_EXPRESSION)||(LA198_0 >= KW_FIELDS && LA198_0 <= KW_FIRST)||(LA198_0 >= KW_FORMAT && LA198_0 <= KW_FORMATTED)||LA198_0==KW_FUNCTIONS||(LA198_0 >= KW_HOUR && LA198_0 <= KW_IDXPROPERTIES)||(LA198_0 >= KW_INDEX && LA198_0 <= KW_INDEXES)||(LA198_0 >= KW_INPATH && LA198_0 <= KW_INPUTFORMAT)||(LA198_0 >= KW_ISOLATION && LA198_0 <= KW_JAR)||(LA198_0 >= KW_KEY && LA198_0 <= KW_LAST)||LA198_0==KW_LEVEL||(LA198_0 >= KW_LIMIT && LA198_0 <= KW_LOAD)||(LA198_0 >= KW_LOCATION && LA198_0 <= KW_LONG)||LA198_0==KW_MANAGEMENT||(LA198_0 >= KW_MAPJOIN && LA198_0 <= KW_MATERIALIZED)||LA198_0==KW_METADATA||(LA198_0 >= KW_MINUTE && LA198_0 <= KW_MONTH)||(LA198_0 >= KW_MOVE && LA198_0 <= KW_MSCK)||(LA198_0 >= KW_NORELY && LA198_0 <= KW_NOSCAN)||LA198_0==KW_NOVALIDATE||LA198_0==KW_NULLS||LA198_0==KW_OFFSET||(LA198_0 >= KW_OPERATOR && LA198_0 <= KW_OPTION)||(LA198_0 >= KW_OUTPUTDRIVER && LA198_0 <= KW_OUTPUTFORMAT)||(LA198_0 >= KW_OVERWRITE && LA198_0 <= KW_OWNER)||(LA198_0 >= KW_PARTITIONED && LA198_0 <= KW_PATH)||(LA198_0 >= KW_PLAN && LA198_0 <= KW_POOL)||LA198_0==KW_PRINCIPALS||(LA198_0 >= KW_PURGE && LA198_0 <= KW_QUERY_PARALLELISM)||LA198_0==KW_READ||(LA198_0 >= KW_REBUILD && LA198_0 <= KW_RECORDWRITER)||(LA198_0 >= KW_RELOAD && LA198_0 <= KW_RESTRICT)||LA198_0==KW_REWRITE||(LA198_0 >= KW_ROLE && LA198_0 <= KW_ROLES)||(LA198_0 >= KW_SCHEDULING_POLICY && LA198_0 <= KW_SECOND)||(LA198_0 >= KW_SEMI && LA198_0 <= KW_SERVER)||(LA198_0 >= KW_SETS && LA198_0 <= KW_SKEWED)||(LA198_0 >= KW_SNAPSHOT && LA198_0 <= KW_SSL)||(LA198_0 >= KW_STATISTICS && LA198_0 <= KW_SUMMARY)||LA198_0==KW_TABLES||(LA198_0 >= KW_TBLPROPERTIES && LA198_0 <= KW_TERMINATED)||LA198_0==KW_TINYINT||(LA198_0 >= KW_TOUCH && LA198_0 <= KW_TRANSACTIONS)||LA198_0==KW_UNARCHIVE||LA198_0==KW_UNDO||LA198_0==KW_UNIONTYPE||(LA198_0 >= KW_UNLOCK && LA198_0 <= KW_UNSIGNED)||(LA198_0 >= KW_URI && LA198_0 <= KW_USE)||(LA198_0 >= KW_UTC && LA198_0 <= KW_VALIDATE)||LA198_0==KW_VALUE_TYPE||(LA198_0 >= KW_VECTORIZATION && LA198_0 <= KW_WEEK)||LA198_0==KW_WHILE||(LA198_0 >= KW_WORK && LA198_0 <= KW_ZONE)||LA198_0==KW_BATCH||LA198_0==KW_DAYOFWEEK||LA198_0==KW_HOLD_DDLTIME||LA198_0==KW_IGNORE||LA198_0==KW_NO_DROP||LA198_0==KW_OFFLINE||LA198_0==KW_PROTECTION||LA198_0==KW_READONLY||LA198_0==KW_TIMESTAMPTZ) ) {
				alt198=1;
			}
			switch (alt198) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1885:14: columnNameTypeList
					{
					pushFollow(FOLLOW_columnNameTypeList_in_createMacroStatement10730);
					columnNameTypeList660=columnNameTypeList();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_columnNameTypeList.add(columnNameTypeList660.getTree());
					}
					break;

			}

			RPAREN661=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_createMacroStatement10733); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN661);

			pushFollow(FOLLOW_expression_in_createMacroStatement10735);
			expression662=expression();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_expression.add(expression662.getTree());
			// AST REWRITE
			// elements: Identifier, columnNameTypeList, expression
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1886:5: -> ^( TOK_CREATEMACRO Identifier ( columnNameTypeList )? expression )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1886:8: ^( TOK_CREATEMACRO Identifier ( columnNameTypeList )? expression )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CREATEMACRO, "TOK_CREATEMACRO"), root_1);
				adaptor.addChild(root_1, stream_Identifier.nextNode());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1886:37: ( columnNameTypeList )?
				if ( stream_columnNameTypeList.hasNext() ) {
					adaptor.addChild(root_1, stream_columnNameTypeList.nextTree());
				}
				stream_columnNameTypeList.reset();

				adaptor.addChild(root_1, stream_expression.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "createMacroStatement"


	public static class dropMacroStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "dropMacroStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1889:1: dropMacroStatement : KW_DROP KW_TEMPORARY KW_MACRO ( ifExists )? Identifier -> ^( TOK_DROPMACRO Identifier ( ifExists )? ) ;
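	// Illustrative example: this rule matches
	//   DROP TEMPORARY MACRO IF EXISTS sigmoid;
	// rewriting to ^(TOK_DROPMACRO Identifier (ifExists)?). Unlike
	// dropFunctionStatement, the TEMPORARY keyword is mandatory here.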
	public final HiveParser.dropMacroStatement_return dropMacroStatement() throws RecognitionException {
		HiveParser.dropMacroStatement_return retval = new HiveParser.dropMacroStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_DROP663=null;
		Token KW_TEMPORARY664=null;
		Token KW_MACRO665=null;
		Token Identifier667=null;
		ParserRuleReturnScope ifExists666 =null;

		ASTNode KW_DROP663_tree=null;
		ASTNode KW_TEMPORARY664_tree=null;
		ASTNode KW_MACRO665_tree=null;
		ASTNode Identifier667_tree=null;
		RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
		RewriteRuleTokenStream stream_KW_TEMPORARY=new RewriteRuleTokenStream(adaptor,"token KW_TEMPORARY");
		RewriteRuleTokenStream stream_Identifier=new RewriteRuleTokenStream(adaptor,"token Identifier");
		RewriteRuleTokenStream stream_KW_MACRO=new RewriteRuleTokenStream(adaptor,"token KW_MACRO");
		RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");

		 pushMsg("drop macro statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1892:5: ( KW_DROP KW_TEMPORARY KW_MACRO ( ifExists )? Identifier -> ^( TOK_DROPMACRO Identifier ( ifExists )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1892:7: KW_DROP KW_TEMPORARY KW_MACRO ( ifExists )? Identifier
			{
			KW_DROP663=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_dropMacroStatement10779); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DROP.add(KW_DROP663);

			KW_TEMPORARY664=(Token)match(input,KW_TEMPORARY,FOLLOW_KW_TEMPORARY_in_dropMacroStatement10781); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TEMPORARY.add(KW_TEMPORARY664);

			KW_MACRO665=(Token)match(input,KW_MACRO,FOLLOW_KW_MACRO_in_dropMacroStatement10783); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_MACRO.add(KW_MACRO665);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1892:37: ( ifExists )?
			int alt199=2;
			int LA199_0 = input.LA(1);
			if ( (LA199_0==KW_IF) ) {
				alt199=1;
			}
			switch (alt199) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1892:37: ifExists
					{
					pushFollow(FOLLOW_ifExists_in_dropMacroStatement10785);
					ifExists666=ifExists();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_ifExists.add(ifExists666.getTree());
					}
					break;

			}

			Identifier667=(Token)match(input,Identifier,FOLLOW_Identifier_in_dropMacroStatement10788); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_Identifier.add(Identifier667);

			// AST REWRITE
			// elements: ifExists, Identifier
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1893:5: -> ^( TOK_DROPMACRO Identifier ( ifExists )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1893:8: ^( TOK_DROPMACRO Identifier ( ifExists )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DROPMACRO, "TOK_DROPMACRO"), root_1);
				adaptor.addChild(root_1, stream_Identifier.nextNode());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1893:35: ( ifExists )?
				if ( stream_ifExists.hasNext() ) {
					adaptor.addChild(root_1, stream_ifExists.nextTree());
				}
				stream_ifExists.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "dropMacroStatement"


	public static class createViewStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "createViewStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1896:1: createViewStatement : KW_CREATE ( orReplace )? KW_VIEW ( ifNotExists )? name= tableName ( LPAREN columnNameCommentList RPAREN )? ( tableComment )? ( viewPartition )? ( tablePropertiesPrefixed )? KW_AS selectStatementWithCTE -> ^( TOK_CREATEVIEW $name ( orReplace )? ( ifNotExists )? ( columnNameCommentList )? ( tableComment )? ( viewPartition )? ( tablePropertiesPrefixed )? selectStatementWithCTE ) ;
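	// Illustrative example (names and property values are placeholders): this
	// rule matches statements such as
	//   CREATE OR REPLACE VIEW mydb.v (id COMMENT 'key', total) COMMENT 'daily totals'
	//     TBLPROPERTIES ('created.by' = 'etl')
	//     AS SELECT id, SUM(amount) FROM sales GROUP BY id;
	// Every clause between the view name and KW_AS is optional; the rewrite
	// collects the ones present as children of
	//   ^(TOK_CREATEVIEW $name ... selectStatementWithCTE).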
	public final HiveParser.createViewStatement_return createViewStatement() throws RecognitionException {
		HiveParser.createViewStatement_return retval = new HiveParser.createViewStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_CREATE668=null;
		Token KW_VIEW670=null;
		Token LPAREN672=null;
		Token RPAREN674=null;
		Token KW_AS678=null;
		ParserRuleReturnScope name =null;
		ParserRuleReturnScope orReplace669 =null;
		ParserRuleReturnScope ifNotExists671 =null;
		ParserRuleReturnScope columnNameCommentList673 =null;
		ParserRuleReturnScope tableComment675 =null;
		ParserRuleReturnScope viewPartition676 =null;
		ParserRuleReturnScope tablePropertiesPrefixed677 =null;
		ParserRuleReturnScope selectStatementWithCTE679 =null;

		ASTNode KW_CREATE668_tree=null;
		ASTNode KW_VIEW670_tree=null;
		ASTNode LPAREN672_tree=null;
		ASTNode RPAREN674_tree=null;
		ASTNode KW_AS678_tree=null;
		RewriteRuleTokenStream stream_KW_VIEW=new RewriteRuleTokenStream(adaptor,"token KW_VIEW");
		RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
		RewriteRuleSubtreeStream stream_columnNameCommentList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameCommentList");
		RewriteRuleSubtreeStream stream_selectStatementWithCTE=new RewriteRuleSubtreeStream(adaptor,"rule selectStatementWithCTE");
		RewriteRuleSubtreeStream stream_orReplace=new RewriteRuleSubtreeStream(adaptor,"rule orReplace");
		RewriteRuleSubtreeStream stream_tablePropertiesPrefixed=new RewriteRuleSubtreeStream(adaptor,"rule tablePropertiesPrefixed");
		RewriteRuleSubtreeStream stream_ifNotExists=new RewriteRuleSubtreeStream(adaptor,"rule ifNotExists");
		RewriteRuleSubtreeStream stream_tableComment=new RewriteRuleSubtreeStream(adaptor,"rule tableComment");
		RewriteRuleSubtreeStream stream_viewPartition=new RewriteRuleSubtreeStream(adaptor,"rule viewPartition");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");


		    pushMsg("create view statement", state);

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1901:5: ( KW_CREATE ( orReplace )? KW_VIEW ( ifNotExists )? name= tableName ( LPAREN columnNameCommentList RPAREN )? ( tableComment )? ( viewPartition )? ( tablePropertiesPrefixed )? KW_AS selectStatementWithCTE -> ^( TOK_CREATEVIEW $name ( orReplace )? ( ifNotExists )? ( columnNameCommentList )? ( tableComment )? ( viewPartition )? ( tablePropertiesPrefixed )? selectStatementWithCTE ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1901:7: KW_CREATE ( orReplace )? KW_VIEW ( ifNotExists )? name= tableName ( LPAREN columnNameCommentList RPAREN )? ( tableComment )? ( viewPartition )? ( tablePropertiesPrefixed )? KW_AS selectStatementWithCTE
			{
			KW_CREATE668=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_createViewStatement10830); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_CREATE.add(KW_CREATE668);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1901:17: ( orReplace )?
			int alt200=2;
			int LA200_0 = input.LA(1);
			if ( (LA200_0==KW_OR) ) {
				alt200=1;
			}
			switch (alt200) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1901:18: orReplace
					{
					pushFollow(FOLLOW_orReplace_in_createViewStatement10833);
					orReplace669=orReplace();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_orReplace.add(orReplace669.getTree());
					}
					break;

			}

			KW_VIEW670=(Token)match(input,KW_VIEW,FOLLOW_KW_VIEW_in_createViewStatement10837); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_VIEW.add(KW_VIEW670);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1901:38: ( ifNotExists )?
			int alt201=2;
			int LA201_0 = input.LA(1);
			if ( (LA201_0==KW_IF) ) {
				alt201=1;
			}
			switch (alt201) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1901:39: ifNotExists
					{
					pushFollow(FOLLOW_ifNotExists_in_createViewStatement10840);
					ifNotExists671=ifNotExists();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_ifNotExists.add(ifNotExists671.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_tableName_in_createViewStatement10846);
			name=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(name.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1902:9: ( LPAREN columnNameCommentList RPAREN )?
			int alt202=2;
			int LA202_0 = input.LA(1);
			if ( (LA202_0==LPAREN) ) {
				alt202=1;
			}
			switch (alt202) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1902:10: LPAREN columnNameCommentList RPAREN
					{
					LPAREN672=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_createViewStatement10857); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN672);

					pushFollow(FOLLOW_columnNameCommentList_in_createViewStatement10859);
					columnNameCommentList673=columnNameCommentList();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_columnNameCommentList.add(columnNameCommentList673.getTree());
					RPAREN674=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_createViewStatement10861); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN674);

					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1902:48: ( tableComment )?
			int alt203=2;
			int LA203_0 = input.LA(1);
			if ( (LA203_0==KW_COMMENT) ) {
				alt203=1;
			}
			switch (alt203) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1902:48: tableComment
					{
					pushFollow(FOLLOW_tableComment_in_createViewStatement10865);
					tableComment675=tableComment();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableComment.add(tableComment675.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1902:62: ( viewPartition )?
			int alt204=2;
			int LA204_0 = input.LA(1);
			if ( (LA204_0==KW_PARTITIONED) ) {
				alt204=1;
			}
			switch (alt204) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1902:62: viewPartition
					{
					pushFollow(FOLLOW_viewPartition_in_createViewStatement10868);
					viewPartition676=viewPartition();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_viewPartition.add(viewPartition676.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1903:9: ( tablePropertiesPrefixed )?
			int alt205=2;
			int LA205_0 = input.LA(1);
			if ( (LA205_0==KW_TBLPROPERTIES) ) {
				alt205=1;
			}
			switch (alt205) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1903:9: tablePropertiesPrefixed
					{
					pushFollow(FOLLOW_tablePropertiesPrefixed_in_createViewStatement10879);
					tablePropertiesPrefixed677=tablePropertiesPrefixed();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tablePropertiesPrefixed.add(tablePropertiesPrefixed677.getTree());
					}
					break;

			}

			KW_AS678=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_createViewStatement10890); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_AS.add(KW_AS678);

			pushFollow(FOLLOW_selectStatementWithCTE_in_createViewStatement10900);
			selectStatementWithCTE679=selectStatementWithCTE();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_selectStatementWithCTE.add(selectStatementWithCTE679.getTree());
			// AST REWRITE
			// elements: ifNotExists, tablePropertiesPrefixed, selectStatementWithCTE, viewPartition, orReplace, tableComment, columnNameCommentList, name
			// token labels: 
			// rule labels: name, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_name=new RewriteRuleSubtreeStream(adaptor,"rule name",name!=null?name.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1906:5: -> ^( TOK_CREATEVIEW $name ( orReplace )? ( ifNotExists )? ( columnNameCommentList )? ( tableComment )? ( viewPartition )? ( tablePropertiesPrefixed )? selectStatementWithCTE )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1906:8: ^( TOK_CREATEVIEW $name ( orReplace )? ( ifNotExists )? ( columnNameCommentList )? ( tableComment )? ( viewPartition )? ( tablePropertiesPrefixed )? selectStatementWithCTE )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CREATEVIEW, "TOK_CREATEVIEW"), root_1);
				adaptor.addChild(root_1, stream_name.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1906:31: ( orReplace )?
				if ( stream_orReplace.hasNext() ) {
					adaptor.addChild(root_1, stream_orReplace.nextTree());
				}
				stream_orReplace.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1907:10: ( ifNotExists )?
				if ( stream_ifNotExists.hasNext() ) {
					adaptor.addChild(root_1, stream_ifNotExists.nextTree());
				}
				stream_ifNotExists.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1908:10: ( columnNameCommentList )?
				if ( stream_columnNameCommentList.hasNext() ) {
					adaptor.addChild(root_1, stream_columnNameCommentList.nextTree());
				}
				stream_columnNameCommentList.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1909:10: ( tableComment )?
				if ( stream_tableComment.hasNext() ) {
					adaptor.addChild(root_1, stream_tableComment.nextTree());
				}
				stream_tableComment.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1910:10: ( viewPartition )?
				if ( stream_viewPartition.hasNext() ) {
					adaptor.addChild(root_1, stream_viewPartition.nextTree());
				}
				stream_viewPartition.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1911:10: ( tablePropertiesPrefixed )?
				if ( stream_tablePropertiesPrefixed.hasNext() ) {
					adaptor.addChild(root_1, stream_tablePropertiesPrefixed.nextTree());
				}
				stream_tablePropertiesPrefixed.reset();

				adaptor.addChild(root_1, stream_selectStatementWithCTE.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "createViewStatement"


	public static class createMaterializedViewStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "createMaterializedViewStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1916:1: createMaterializedViewStatement : KW_CREATE KW_MATERIALIZED KW_VIEW ( ifNotExists )? name= tableName ( rewriteEnabled )? ( tableComment )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? KW_AS selectStatementWithCTE -> ^( TOK_CREATE_MATERIALIZED_VIEW $name ( ifNotExists )? ( rewriteEnabled )? ( tableComment )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? selectStatementWithCTE ) ;
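	// Illustrative example (clause spellings follow the sub-rules referenced
	// above; names and paths are placeholders):
	//   CREATE MATERIALIZED VIEW IF NOT EXISTS mydb.mv
	//     COMMENT 'pre-aggregated sales'
	//     STORED AS ORC
	//     LOCATION '/warehouse/mv'
	//     AS SELECT id, SUM(amount) FROM sales GROUP BY id;
	// The lookahead decisions below predict each optional clause by its first
	// token: rewriteEnabled by KW_ENABLE, tableRowFormat by KW_ROW,
	// tableFileFormat by KW_STORED, tableLocation by KW_LOCATION, and
	// tablePropertiesPrefixed by KW_TBLPROPERTIES.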
	public final HiveParser.createMaterializedViewStatement_return createMaterializedViewStatement() throws RecognitionException {
		HiveParser.createMaterializedViewStatement_return retval = new HiveParser.createMaterializedViewStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_CREATE680=null;
		Token KW_MATERIALIZED681=null;
		Token KW_VIEW682=null;
		Token KW_AS690=null;
		ParserRuleReturnScope name =null;
		ParserRuleReturnScope ifNotExists683 =null;
		ParserRuleReturnScope rewriteEnabled684 =null;
		ParserRuleReturnScope tableComment685 =null;
		ParserRuleReturnScope tableRowFormat686 =null;
		ParserRuleReturnScope tableFileFormat687 =null;
		ParserRuleReturnScope tableLocation688 =null;
		ParserRuleReturnScope tablePropertiesPrefixed689 =null;
		ParserRuleReturnScope selectStatementWithCTE691 =null;

		ASTNode KW_CREATE680_tree=null;
		ASTNode KW_MATERIALIZED681_tree=null;
		ASTNode KW_VIEW682_tree=null;
		ASTNode KW_AS690_tree=null;
		RewriteRuleTokenStream stream_KW_VIEW=new RewriteRuleTokenStream(adaptor,"token KW_VIEW");
		RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
		RewriteRuleTokenStream stream_KW_MATERIALIZED=new RewriteRuleTokenStream(adaptor,"token KW_MATERIALIZED");
		RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
		RewriteRuleSubtreeStream stream_tableRowFormat=new RewriteRuleSubtreeStream(adaptor,"rule tableRowFormat");
		RewriteRuleSubtreeStream stream_selectStatementWithCTE=new RewriteRuleSubtreeStream(adaptor,"rule selectStatementWithCTE");
		RewriteRuleSubtreeStream stream_tableLocation=new RewriteRuleSubtreeStream(adaptor,"rule tableLocation");
		RewriteRuleSubtreeStream stream_rewriteEnabled=new RewriteRuleSubtreeStream(adaptor,"rule rewriteEnabled");
		RewriteRuleSubtreeStream stream_tablePropertiesPrefixed=new RewriteRuleSubtreeStream(adaptor,"rule tablePropertiesPrefixed");
		RewriteRuleSubtreeStream stream_ifNotExists=new RewriteRuleSubtreeStream(adaptor,"rule ifNotExists");
		RewriteRuleSubtreeStream stream_tableFileFormat=new RewriteRuleSubtreeStream(adaptor,"rule tableFileFormat");
		RewriteRuleSubtreeStream stream_tableComment=new RewriteRuleSubtreeStream(adaptor,"rule tableComment");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");


		    pushMsg("create materialized view statement", state);

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1921:5: ( KW_CREATE KW_MATERIALIZED KW_VIEW ( ifNotExists )? name= tableName ( rewriteEnabled )? ( tableComment )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? KW_AS selectStatementWithCTE -> ^( TOK_CREATE_MATERIALIZED_VIEW $name ( ifNotExists )? ( rewriteEnabled )? ( tableComment )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? selectStatementWithCTE ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1921:7: KW_CREATE KW_MATERIALIZED KW_VIEW ( ifNotExists )? name= tableName ( rewriteEnabled )? ( tableComment )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? KW_AS selectStatementWithCTE
			{
			KW_CREATE680=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_createMaterializedViewStatement11023); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_CREATE.add(KW_CREATE680);

			KW_MATERIALIZED681=(Token)match(input,KW_MATERIALIZED,FOLLOW_KW_MATERIALIZED_in_createMaterializedViewStatement11025); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_MATERIALIZED.add(KW_MATERIALIZED681);

			KW_VIEW682=(Token)match(input,KW_VIEW,FOLLOW_KW_VIEW_in_createMaterializedViewStatement11027); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_VIEW.add(KW_VIEW682);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1921:41: ( ifNotExists )?
			int alt206=2;
			int LA206_0 = input.LA(1);
			if ( (LA206_0==KW_IF) ) {
				alt206=1;
			}
			switch (alt206) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1921:42: ifNotExists
					{
					pushFollow(FOLLOW_ifNotExists_in_createMaterializedViewStatement11030);
					ifNotExists683=ifNotExists();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_ifNotExists.add(ifNotExists683.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_tableName_in_createMaterializedViewStatement11036);
			name=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(name.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1922:9: ( rewriteEnabled )?
			int alt207=2;
			int LA207_0 = input.LA(1);
			if ( (LA207_0==KW_ENABLE) ) {
				alt207=1;
			}
			switch (alt207) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1922:9: rewriteEnabled
					{
					pushFollow(FOLLOW_rewriteEnabled_in_createMaterializedViewStatement11046);
					rewriteEnabled684=rewriteEnabled();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_rewriteEnabled.add(rewriteEnabled684.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1922:25: ( tableComment )?
			int alt208=2;
			int LA208_0 = input.LA(1);
			if ( (LA208_0==KW_COMMENT) ) {
				alt208=1;
			}
			switch (alt208) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1922:25: tableComment
					{
					pushFollow(FOLLOW_tableComment_in_createMaterializedViewStatement11049);
					tableComment685=tableComment();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableComment.add(tableComment685.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1922:39: ( tableRowFormat )?
			int alt209=2;
			int LA209_0 = input.LA(1);
			if ( (LA209_0==KW_ROW) ) {
				alt209=1;
			}
			switch (alt209) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1922:39: tableRowFormat
					{
					pushFollow(FOLLOW_tableRowFormat_in_createMaterializedViewStatement11052);
					tableRowFormat686=tableRowFormat();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableRowFormat.add(tableRowFormat686.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1922:55: ( tableFileFormat )?
			int alt210=2;
			int LA210_0 = input.LA(1);
			if ( (LA210_0==KW_STORED) ) {
				alt210=1;
			}
			switch (alt210) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1922:55: tableFileFormat
					{
					pushFollow(FOLLOW_tableFileFormat_in_createMaterializedViewStatement11055);
					tableFileFormat687=tableFileFormat();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableFileFormat.add(tableFileFormat687.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1922:72: ( tableLocation )?
			int alt211=2;
			int LA211_0 = input.LA(1);
			if ( (LA211_0==KW_LOCATION) ) {
				alt211=1;
			}
			switch (alt211) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1922:72: tableLocation
					{
					pushFollow(FOLLOW_tableLocation_in_createMaterializedViewStatement11058);
					tableLocation688=tableLocation();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableLocation.add(tableLocation688.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1923:9: ( tablePropertiesPrefixed )?
			int alt212=2;
			int LA212_0 = input.LA(1);
			if ( (LA212_0==KW_TBLPROPERTIES) ) {
				alt212=1;
			}
			switch (alt212) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1923:9: tablePropertiesPrefixed
					{
					pushFollow(FOLLOW_tablePropertiesPrefixed_in_createMaterializedViewStatement11069);
					tablePropertiesPrefixed689=tablePropertiesPrefixed();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tablePropertiesPrefixed.add(tablePropertiesPrefixed689.getTree());
					}
					break;

			}

			KW_AS690=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_createMaterializedViewStatement11072); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_AS.add(KW_AS690);

			pushFollow(FOLLOW_selectStatementWithCTE_in_createMaterializedViewStatement11074);
			selectStatementWithCTE691=selectStatementWithCTE();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_selectStatementWithCTE.add(selectStatementWithCTE691.getTree());
			// AST REWRITE
			// elements: tableComment, tableRowFormat, tableFileFormat, rewriteEnabled, tablePropertiesPrefixed, selectStatementWithCTE, ifNotExists, name, tableLocation
			// token labels: 
			// rule labels: name, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_name=new RewriteRuleSubtreeStream(adaptor,"rule name",name!=null?name.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1924:5: -> ^( TOK_CREATE_MATERIALIZED_VIEW $name ( ifNotExists )? ( rewriteEnabled )? ( tableComment )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? selectStatementWithCTE )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1924:8: ^( TOK_CREATE_MATERIALIZED_VIEW $name ( ifNotExists )? ( rewriteEnabled )? ( tableComment )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? selectStatementWithCTE )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CREATE_MATERIALIZED_VIEW, "TOK_CREATE_MATERIALIZED_VIEW"), root_1);
				adaptor.addChild(root_1, stream_name.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1925:10: ( ifNotExists )?
				if ( stream_ifNotExists.hasNext() ) {
					adaptor.addChild(root_1, stream_ifNotExists.nextTree());
				}
				stream_ifNotExists.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1926:10: ( rewriteEnabled )?
				if ( stream_rewriteEnabled.hasNext() ) {
					adaptor.addChild(root_1, stream_rewriteEnabled.nextTree());
				}
				stream_rewriteEnabled.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1927:10: ( tableComment )?
				if ( stream_tableComment.hasNext() ) {
					adaptor.addChild(root_1, stream_tableComment.nextTree());
				}
				stream_tableComment.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1928:10: ( tableRowFormat )?
				if ( stream_tableRowFormat.hasNext() ) {
					adaptor.addChild(root_1, stream_tableRowFormat.nextTree());
				}
				stream_tableRowFormat.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1929:10: ( tableFileFormat )?
				if ( stream_tableFileFormat.hasNext() ) {
					adaptor.addChild(root_1, stream_tableFileFormat.nextTree());
				}
				stream_tableFileFormat.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1930:10: ( tableLocation )?
				if ( stream_tableLocation.hasNext() ) {
					adaptor.addChild(root_1, stream_tableLocation.nextTree());
				}
				stream_tableLocation.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1931:10: ( tablePropertiesPrefixed )?
				if ( stream_tablePropertiesPrefixed.hasNext() ) {
					adaptor.addChild(root_1, stream_tablePropertiesPrefixed.nextTree());
				}
				stream_tablePropertiesPrefixed.reset();

				adaptor.addChild(root_1, stream_selectStatementWithCTE.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
		 reportError(e);
		  throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "createMaterializedViewStatement"


	public static class viewPartition_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "viewPartition"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1936:1: viewPartition : KW_PARTITIONED KW_ON LPAREN columnNameList RPAREN -> ^( TOK_VIEWPARTCOLS columnNameList ) ;
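	// Illustrative example: this rule matches the clause
	//   PARTITIONED ON (dt, region)
	// within createViewStatement, rewriting it to ^(TOK_VIEWPARTCOLS columnNameList).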
	public final HiveParser.viewPartition_return viewPartition() throws RecognitionException {
		HiveParser.viewPartition_return retval = new HiveParser.viewPartition_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_PARTITIONED692=null;
		Token KW_ON693=null;
		Token LPAREN694=null;
		Token RPAREN696=null;
		ParserRuleReturnScope columnNameList695 =null;

		ASTNode KW_PARTITIONED692_tree=null;
		ASTNode KW_ON693_tree=null;
		ASTNode LPAREN694_tree=null;
		ASTNode RPAREN696_tree=null;
		RewriteRuleTokenStream stream_KW_PARTITIONED=new RewriteRuleTokenStream(adaptor,"token KW_PARTITIONED");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_KW_ON=new RewriteRuleTokenStream(adaptor,"token KW_ON");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleSubtreeStream stream_columnNameList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameList");

		 pushMsg("view partition specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1939:5: ( KW_PARTITIONED KW_ON LPAREN columnNameList RPAREN -> ^( TOK_VIEWPARTCOLS columnNameList ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1939:7: KW_PARTITIONED KW_ON LPAREN columnNameList RPAREN
			{
			KW_PARTITIONED692=(Token)match(input,KW_PARTITIONED,FOLLOW_KW_PARTITIONED_in_viewPartition11218); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_PARTITIONED.add(KW_PARTITIONED692);

			KW_ON693=(Token)match(input,KW_ON,FOLLOW_KW_ON_in_viewPartition11220); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ON.add(KW_ON693);

			LPAREN694=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_viewPartition11222); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN694);

			pushFollow(FOLLOW_columnNameList_in_viewPartition11224);
			columnNameList695=columnNameList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnNameList.add(columnNameList695.getTree());
			RPAREN696=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_viewPartition11226); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN696);

			// AST REWRITE
			// elements: columnNameList
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1940:5: -> ^( TOK_VIEWPARTCOLS columnNameList )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1940:8: ^( TOK_VIEWPARTCOLS columnNameList )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_VIEWPARTCOLS, "TOK_VIEWPARTCOLS"), root_1);
				adaptor.addChild(root_1, stream_columnNameList.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "viewPartition"


	public static class dropViewStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "dropViewStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1943:1: dropViewStatement : KW_DROP KW_VIEW ( ifExists )? viewName -> ^( TOK_DROPVIEW viewName ( ifExists )? ) ;
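	// Illustrative example: this rule matches
	//   DROP VIEW IF EXISTS mydb.v;
	// rewriting to ^(TOK_DROPVIEW viewName (ifExists)?).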
	public final HiveParser.dropViewStatement_return dropViewStatement() throws RecognitionException {
		HiveParser.dropViewStatement_return retval = new HiveParser.dropViewStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_DROP697=null;
		Token KW_VIEW698=null;
		ParserRuleReturnScope ifExists699 =null;
		ParserRuleReturnScope viewName700 =null;

		ASTNode KW_DROP697_tree=null;
		ASTNode KW_VIEW698_tree=null;
		RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
		RewriteRuleTokenStream stream_KW_VIEW=new RewriteRuleTokenStream(adaptor,"token KW_VIEW");
		RewriteRuleSubtreeStream stream_viewName=new RewriteRuleSubtreeStream(adaptor,"rule viewName");
		RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");

		 pushMsg("drop view statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1946:5: ( KW_DROP KW_VIEW ( ifExists )? viewName -> ^( TOK_DROPVIEW viewName ( ifExists )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1946:7: KW_DROP KW_VIEW ( ifExists )? viewName
			{
			KW_DROP697=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_dropViewStatement11265); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DROP.add(KW_DROP697);

			KW_VIEW698=(Token)match(input,KW_VIEW,FOLLOW_KW_VIEW_in_dropViewStatement11267); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_VIEW.add(KW_VIEW698);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1946:23: ( ifExists )?
			int alt213=2;
			int LA213_0 = input.LA(1);
			if ( (LA213_0==KW_IF) ) {
				alt213=1;
			}
			switch (alt213) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1946:23: ifExists
					{
					pushFollow(FOLLOW_ifExists_in_dropViewStatement11269);
					ifExists699=ifExists();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_ifExists.add(ifExists699.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_viewName_in_dropViewStatement11272);
			viewName700=viewName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_viewName.add(viewName700.getTree());
			// AST REWRITE
			// elements: viewName, ifExists
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1946:42: -> ^( TOK_DROPVIEW viewName ( ifExists )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1946:45: ^( TOK_DROPVIEW viewName ( ifExists )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DROPVIEW, "TOK_DROPVIEW"), root_1);
				adaptor.addChild(root_1, stream_viewName.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1946:69: ( ifExists )?
				if ( stream_ifExists.hasNext() ) {
					adaptor.addChild(root_1, stream_ifExists.nextTree());
				}
				stream_ifExists.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup to run before leaving the rule (nothing needed here)
		}
		return retval;
	}
	// $ANTLR end "dropViewStatement"


	public static class dropMaterializedViewStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "dropMaterializedViewStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1949:1: dropMaterializedViewStatement : KW_DROP KW_MATERIALIZED KW_VIEW ( ifExists )? viewName -> ^( TOK_DROP_MATERIALIZED_VIEW viewName ( ifExists )? ) ;
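	// Illustrative example (hand-written, not ANTLR output):
	//   DROP MATERIALIZED VIEW IF EXISTS mv1
	// rewrites to ^(TOK_DROP_MATERIALIZED_VIEW viewName ifExists?).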
	public final HiveParser.dropMaterializedViewStatement_return dropMaterializedViewStatement() throws RecognitionException {
		HiveParser.dropMaterializedViewStatement_return retval = new HiveParser.dropMaterializedViewStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_DROP701=null;
		Token KW_MATERIALIZED702=null;
		Token KW_VIEW703=null;
		ParserRuleReturnScope ifExists704 =null;
		ParserRuleReturnScope viewName705 =null;

		ASTNode KW_DROP701_tree=null;
		ASTNode KW_MATERIALIZED702_tree=null;
		ASTNode KW_VIEW703_tree=null;
		RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
		RewriteRuleTokenStream stream_KW_VIEW=new RewriteRuleTokenStream(adaptor,"token KW_VIEW");
		RewriteRuleTokenStream stream_KW_MATERIALIZED=new RewriteRuleTokenStream(adaptor,"token KW_MATERIALIZED");
		RewriteRuleSubtreeStream stream_viewName=new RewriteRuleSubtreeStream(adaptor,"rule viewName");
		RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");

		 pushMsg("drop materialized view statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1952:5: ( KW_DROP KW_MATERIALIZED KW_VIEW ( ifExists )? viewName -> ^( TOK_DROP_MATERIALIZED_VIEW viewName ( ifExists )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1952:7: KW_DROP KW_MATERIALIZED KW_VIEW ( ifExists )? viewName
			{
			KW_DROP701=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_dropMaterializedViewStatement11310); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DROP.add(KW_DROP701);

			KW_MATERIALIZED702=(Token)match(input,KW_MATERIALIZED,FOLLOW_KW_MATERIALIZED_in_dropMaterializedViewStatement11312); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_MATERIALIZED.add(KW_MATERIALIZED702);

			KW_VIEW703=(Token)match(input,KW_VIEW,FOLLOW_KW_VIEW_in_dropMaterializedViewStatement11314); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_VIEW.add(KW_VIEW703);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1952:39: ( ifExists )?
			int alt214=2;
			int LA214_0 = input.LA(1);
			if ( (LA214_0==KW_IF) ) {
				alt214=1;
			}
			switch (alt214) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1952:39: ifExists
					{
					pushFollow(FOLLOW_ifExists_in_dropMaterializedViewStatement11316);
					ifExists704=ifExists();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_ifExists.add(ifExists704.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_viewName_in_dropMaterializedViewStatement11319);
			viewName705=viewName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_viewName.add(viewName705.getTree());
			// AST REWRITE
			// elements: ifExists, viewName
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1952:58: -> ^( TOK_DROP_MATERIALIZED_VIEW viewName ( ifExists )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1952:61: ^( TOK_DROP_MATERIALIZED_VIEW viewName ( ifExists )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DROP_MATERIALIZED_VIEW, "TOK_DROP_MATERIALIZED_VIEW"), root_1);
				adaptor.addChild(root_1, stream_viewName.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1952:99: ( ifExists )?
				if ( stream_ifExists.hasNext() ) {
					adaptor.addChild(root_1, stream_ifExists.nextTree());
				}
				stream_ifExists.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup to run before leaving the rule (nothing needed here)
		}
		return retval;
	}
	// $ANTLR end "dropMaterializedViewStatement"


	public static class showFunctionIdentifier_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "showFunctionIdentifier"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1955:1: showFunctionIdentifier : ( functionIdentifier | StringLiteral );
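	// Illustrative note (hand-written, not ANTLR output): SHOW FUNCTIONS accepts
	// either a (possibly db-qualified) function name or a quoted pattern, e.g.
	//   SHOW FUNCTIONS 'xpath.*'
	// The long lookahead test below enumerates the non-reserved keywords that may
	// double as identifiers in the functionIdentifier alternative.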
	public final HiveParser.showFunctionIdentifier_return showFunctionIdentifier() throws RecognitionException {
		HiveParser.showFunctionIdentifier_return retval = new HiveParser.showFunctionIdentifier_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token StringLiteral707=null;
		ParserRuleReturnScope functionIdentifier706 =null;

		ASTNode StringLiteral707_tree=null;

		 pushMsg("identifier for show function statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1958:5: ( functionIdentifier | StringLiteral )
			int alt215=2;
			int LA215_0 = input.LA(1);
			if ( (LA215_0==Identifier||(LA215_0 >= KW_ABORT && LA215_0 <= KW_AFTER)||LA215_0==KW_ALLOC_FRACTION||LA215_0==KW_ANALYZE||LA215_0==KW_ARCHIVE||LA215_0==KW_ASC||(LA215_0 >= KW_AUTOCOMMIT && LA215_0 <= KW_BEFORE)||(LA215_0 >= KW_BUCKET && LA215_0 <= KW_BUCKETS)||(LA215_0 >= KW_CACHE && LA215_0 <= KW_CASCADE)||LA215_0==KW_CHANGE||(LA215_0 >= KW_CHECK && LA215_0 <= KW_COLLECTION)||(LA215_0 >= KW_COLUMNS && LA215_0 <= KW_COMMENT)||(LA215_0 >= KW_COMPACT && LA215_0 <= KW_CONCATENATE)||LA215_0==KW_CONTINUE||LA215_0==KW_DATA||LA215_0==KW_DATABASES||(LA215_0 >= KW_DATETIME && LA215_0 <= KW_DBPROPERTIES)||(LA215_0 >= KW_DEFAULT && LA215_0 <= KW_DEFINED)||(LA215_0 >= KW_DELIMITED && LA215_0 <= KW_DESC)||(LA215_0 >= KW_DETAIL && LA215_0 <= KW_DISABLE)||(LA215_0 >= KW_DISTRIBUTE && LA215_0 <= KW_DO)||LA215_0==KW_DOW||(LA215_0 >= KW_DUMP && LA215_0 <= KW_ELEM_TYPE)||LA215_0==KW_ENABLE||(LA215_0 >= KW_ENFORCED && LA215_0 <= KW_ESCAPED)||LA215_0==KW_EXCLUSIVE||(LA215_0 >= KW_EXPLAIN && LA215_0 <= KW_EXPRESSION)||(LA215_0 >= KW_FIELDS && LA215_0 <= KW_FIRST)||(LA215_0 >= KW_FORMAT && LA215_0 <= KW_FORMATTED)||LA215_0==KW_FUNCTIONS||(LA215_0 >= KW_HOUR && LA215_0 <= KW_IDXPROPERTIES)||(LA215_0 >= KW_INDEX && LA215_0 <= KW_INDEXES)||(LA215_0 >= KW_INPATH && LA215_0 <= KW_INPUTFORMAT)||(LA215_0 >= KW_ISOLATION && LA215_0 <= KW_JAR)||(LA215_0 >= KW_KEY && LA215_0 <= KW_LAST)||LA215_0==KW_LEVEL||(LA215_0 >= KW_LIMIT && LA215_0 <= KW_LOAD)||(LA215_0 >= KW_LOCATION && LA215_0 <= KW_LONG)||LA215_0==KW_MANAGEMENT||(LA215_0 >= KW_MAPJOIN && LA215_0 <= KW_MATERIALIZED)||LA215_0==KW_METADATA||(LA215_0 >= KW_MINUTE && LA215_0 <= KW_MONTH)||(LA215_0 >= KW_MOVE && LA215_0 <= KW_MSCK)||(LA215_0 >= KW_NORELY && LA215_0 <= KW_NOSCAN)||LA215_0==KW_NOVALIDATE||LA215_0==KW_NULLS||LA215_0==KW_OFFSET||(LA215_0 >= KW_OPERATOR && LA215_0 <= KW_OPTION)||(LA215_0 >= KW_OUTPUTDRIVER && LA215_0 <= KW_OUTPUTFORMAT)||(LA215_0 >= KW_OVERWRITE && LA215_0 <= KW_OWNER)||(LA215_0 >= KW_PARTITIONED && LA215_0 <= KW_PATH)||(LA215_0 >= KW_PLAN && LA215_0 <= KW_POOL)||LA215_0==KW_PRINCIPALS||(LA215_0 >= KW_PURGE && LA215_0 <= KW_QUERY_PARALLELISM)||LA215_0==KW_READ||(LA215_0 >= KW_REBUILD && LA215_0 <= KW_RECORDWRITER)||(LA215_0 >= KW_RELOAD && LA215_0 <= KW_RESTRICT)||LA215_0==KW_REWRITE||(LA215_0 >= KW_ROLE && LA215_0 <= KW_ROLES)||(LA215_0 >= KW_SCHEDULING_POLICY && LA215_0 <= KW_SECOND)||(LA215_0 >= KW_SEMI && LA215_0 <= KW_SERVER)||(LA215_0 >= KW_SETS && LA215_0 <= KW_SKEWED)||(LA215_0 >= KW_SNAPSHOT && LA215_0 <= KW_SSL)||(LA215_0 >= KW_STATISTICS && LA215_0 <= KW_SUMMARY)||LA215_0==KW_TABLES||(LA215_0 >= KW_TBLPROPERTIES && LA215_0 <= KW_TERMINATED)||LA215_0==KW_TINYINT||(LA215_0 >= KW_TOUCH && LA215_0 <= KW_TRANSACTIONS)||LA215_0==KW_UNARCHIVE||LA215_0==KW_UNDO||LA215_0==KW_UNIONTYPE||(LA215_0 >= KW_UNLOCK && LA215_0 <= KW_UNSIGNED)||(LA215_0 >= KW_URI && LA215_0 <= KW_USE)||(LA215_0 >= KW_UTC && LA215_0 <= KW_VALIDATE)||LA215_0==KW_VALUE_TYPE||(LA215_0 >= KW_VECTORIZATION && LA215_0 <= KW_WEEK)||LA215_0==KW_WHILE||(LA215_0 >= KW_WORK && LA215_0 <= KW_ZONE)||LA215_0==KW_BATCH||LA215_0==KW_DAYOFWEEK||LA215_0==KW_HOLD_DDLTIME||LA215_0==KW_IGNORE||LA215_0==KW_NO_DROP||LA215_0==KW_OFFLINE||LA215_0==KW_PROTECTION||LA215_0==KW_READONLY||LA215_0==KW_TIMESTAMPTZ) ) {
				alt215=1;
			}
			else if ( (LA215_0==StringLiteral) ) {
				alt215=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 215, 0, input);
				throw nvae;
			}

			switch (alt215) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1958:7: functionIdentifier
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_functionIdentifier_in_showFunctionIdentifier11357);
					functionIdentifier706=functionIdentifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, functionIdentifier706.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1959:7: StringLiteral
					{
					root_0 = (ASTNode)adaptor.nil();


					StringLiteral707=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_showFunctionIdentifier11365); if (state.failed) return retval;
					if ( state.backtracking==0 ) {
					StringLiteral707_tree = (ASTNode)adaptor.create(StringLiteral707);
					adaptor.addChild(root_0, StringLiteral707_tree);
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup to run before leaving the rule (nothing needed here)
		}
		return retval;
	}
	// $ANTLR end "showFunctionIdentifier"


	public static class showStmtIdentifier_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "showStmtIdentifier"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1962:1: showStmtIdentifier : ( identifier | StringLiteral );
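	// Illustrative note (hand-written, not ANTLR output): the same identifier-or-
	// string-literal shape backs SHOW statements such as
	//   SHOW TABLES 'page_view*'
	// with the lookahead again listing the keywords usable as plain identifiers.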
	public final HiveParser.showStmtIdentifier_return showStmtIdentifier() throws RecognitionException {
		HiveParser.showStmtIdentifier_return retval = new HiveParser.showStmtIdentifier_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token StringLiteral709=null;
		ParserRuleReturnScope identifier708 =null;

		ASTNode StringLiteral709_tree=null;

		 pushMsg("identifier for show statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1965:5: ( identifier | StringLiteral )
			int alt216=2;
			int LA216_0 = input.LA(1);
			if ( (LA216_0==Identifier||(LA216_0 >= KW_ABORT && LA216_0 <= KW_AFTER)||LA216_0==KW_ALLOC_FRACTION||LA216_0==KW_ANALYZE||LA216_0==KW_ARCHIVE||LA216_0==KW_ASC||(LA216_0 >= KW_AUTOCOMMIT && LA216_0 <= KW_BEFORE)||(LA216_0 >= KW_BUCKET && LA216_0 <= KW_BUCKETS)||(LA216_0 >= KW_CACHE && LA216_0 <= KW_CASCADE)||LA216_0==KW_CHANGE||(LA216_0 >= KW_CHECK && LA216_0 <= KW_COLLECTION)||(LA216_0 >= KW_COLUMNS && LA216_0 <= KW_COMMENT)||(LA216_0 >= KW_COMPACT && LA216_0 <= KW_CONCATENATE)||LA216_0==KW_CONTINUE||LA216_0==KW_DATA||LA216_0==KW_DATABASES||(LA216_0 >= KW_DATETIME && LA216_0 <= KW_DBPROPERTIES)||(LA216_0 >= KW_DEFAULT && LA216_0 <= KW_DEFINED)||(LA216_0 >= KW_DELIMITED && LA216_0 <= KW_DESC)||(LA216_0 >= KW_DETAIL && LA216_0 <= KW_DISABLE)||(LA216_0 >= KW_DISTRIBUTE && LA216_0 <= KW_DO)||LA216_0==KW_DOW||(LA216_0 >= KW_DUMP && LA216_0 <= KW_ELEM_TYPE)||LA216_0==KW_ENABLE||(LA216_0 >= KW_ENFORCED && LA216_0 <= KW_ESCAPED)||LA216_0==KW_EXCLUSIVE||(LA216_0 >= KW_EXPLAIN && LA216_0 <= KW_EXPRESSION)||(LA216_0 >= KW_FIELDS && LA216_0 <= KW_FIRST)||(LA216_0 >= KW_FORMAT && LA216_0 <= KW_FORMATTED)||LA216_0==KW_FUNCTIONS||(LA216_0 >= KW_HOUR && LA216_0 <= KW_IDXPROPERTIES)||(LA216_0 >= KW_INDEX && LA216_0 <= KW_INDEXES)||(LA216_0 >= KW_INPATH && LA216_0 <= KW_INPUTFORMAT)||(LA216_0 >= KW_ISOLATION && LA216_0 <= KW_JAR)||(LA216_0 >= KW_KEY && LA216_0 <= KW_LAST)||LA216_0==KW_LEVEL||(LA216_0 >= KW_LIMIT && LA216_0 <= KW_LOAD)||(LA216_0 >= KW_LOCATION && LA216_0 <= KW_LONG)||LA216_0==KW_MANAGEMENT||(LA216_0 >= KW_MAPJOIN && LA216_0 <= KW_MATERIALIZED)||LA216_0==KW_METADATA||(LA216_0 >= KW_MINUTE && LA216_0 <= KW_MONTH)||(LA216_0 >= KW_MOVE && LA216_0 <= KW_MSCK)||(LA216_0 >= KW_NORELY && LA216_0 <= KW_NOSCAN)||LA216_0==KW_NOVALIDATE||LA216_0==KW_NULLS||LA216_0==KW_OFFSET||(LA216_0 >= KW_OPERATOR && LA216_0 <= KW_OPTION)||(LA216_0 >= KW_OUTPUTDRIVER && LA216_0 <= KW_OUTPUTFORMAT)||(LA216_0 >= KW_OVERWRITE && LA216_0 <= KW_OWNER)||(LA216_0 >= KW_PARTITIONED && LA216_0 <= KW_PATH)||(LA216_0 >= KW_PLAN && LA216_0 <= KW_POOL)||LA216_0==KW_PRINCIPALS||(LA216_0 >= KW_PURGE && LA216_0 <= KW_QUERY_PARALLELISM)||LA216_0==KW_READ||(LA216_0 >= KW_REBUILD && LA216_0 <= KW_RECORDWRITER)||(LA216_0 >= KW_RELOAD && LA216_0 <= KW_RESTRICT)||LA216_0==KW_REWRITE||(LA216_0 >= KW_ROLE && LA216_0 <= KW_ROLES)||(LA216_0 >= KW_SCHEDULING_POLICY && LA216_0 <= KW_SECOND)||(LA216_0 >= KW_SEMI && LA216_0 <= KW_SERVER)||(LA216_0 >= KW_SETS && LA216_0 <= KW_SKEWED)||(LA216_0 >= KW_SNAPSHOT && LA216_0 <= KW_SSL)||(LA216_0 >= KW_STATISTICS && LA216_0 <= KW_SUMMARY)||LA216_0==KW_TABLES||(LA216_0 >= KW_TBLPROPERTIES && LA216_0 <= KW_TERMINATED)||LA216_0==KW_TINYINT||(LA216_0 >= KW_TOUCH && LA216_0 <= KW_TRANSACTIONS)||LA216_0==KW_UNARCHIVE||LA216_0==KW_UNDO||LA216_0==KW_UNIONTYPE||(LA216_0 >= KW_UNLOCK && LA216_0 <= KW_UNSIGNED)||(LA216_0 >= KW_URI && LA216_0 <= KW_USE)||(LA216_0 >= KW_UTC && LA216_0 <= KW_VALIDATE)||LA216_0==KW_VALUE_TYPE||(LA216_0 >= KW_VECTORIZATION && LA216_0 <= KW_WEEK)||LA216_0==KW_WHILE||(LA216_0 >= KW_WORK && LA216_0 <= KW_ZONE)||LA216_0==KW_BATCH||LA216_0==KW_DAYOFWEEK||LA216_0==KW_HOLD_DDLTIME||LA216_0==KW_IGNORE||LA216_0==KW_NO_DROP||LA216_0==KW_OFFLINE||LA216_0==KW_PROTECTION||LA216_0==KW_READONLY||LA216_0==KW_TIMESTAMPTZ) ) {
				alt216=1;
			}
			else if ( (LA216_0==StringLiteral) ) {
				alt216=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 216, 0, input);
				throw nvae;
			}

			switch (alt216) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1965:7: identifier
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_identifier_in_showStmtIdentifier11392);
					identifier708=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, identifier708.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1966:7: StringLiteral
					{
					root_0 = (ASTNode)adaptor.nil();


					StringLiteral709=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_showStmtIdentifier11400); if (state.failed) return retval;
					if ( state.backtracking==0 ) {
					StringLiteral709_tree = (ASTNode)adaptor.create(StringLiteral709);
					adaptor.addChild(root_0, StringLiteral709_tree);
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup to run before leaving the rule (nothing needed here)
		}
		return retval;
	}
	// $ANTLR end "showStmtIdentifier"


	public static class tableComment_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tableComment"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1969:1: tableComment : KW_COMMENT comment= StringLiteral -> ^( TOK_TABLECOMMENT $comment) ;
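	// Illustrative example (hand-written, not ANTLR output):
	//   COMMENT 'stores raw click events'
	// rewrites to ^(TOK_TABLECOMMENT $comment).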
	public final HiveParser.tableComment_return tableComment() throws RecognitionException {
		HiveParser.tableComment_return retval = new HiveParser.tableComment_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token comment=null;
		Token KW_COMMENT710=null;

		ASTNode comment_tree=null;
		ASTNode KW_COMMENT710_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");

		 pushMsg("table's comment", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1972:5: ( KW_COMMENT comment= StringLiteral -> ^( TOK_TABLECOMMENT $comment) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1973:7: KW_COMMENT comment= StringLiteral
			{
			KW_COMMENT710=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_tableComment11433); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_COMMENT.add(KW_COMMENT710);

			comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableComment11437); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(comment);

			// AST REWRITE
			// elements: comment
			// token labels: comment
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1973:41: -> ^( TOK_TABLECOMMENT $comment)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1973:44: ^( TOK_TABLECOMMENT $comment)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLECOMMENT, "TOK_TABLECOMMENT"), root_1);
				adaptor.addChild(root_1, stream_comment.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup to run before leaving the rule (nothing needed here)
		}
		return retval;
	}
	// $ANTLR end "tableComment"


	public static class tablePartition_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tablePartition"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1976:1: tablePartition : KW_PARTITIONED KW_BY LPAREN columnNameTypeConstraint ( COMMA columnNameTypeConstraint )* RPAREN -> ^( TOK_TABLEPARTCOLS ( columnNameTypeConstraint )+ ) ;
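	// Illustrative example (hand-written, not ANTLR output):
	//   PARTITIONED BY (dt STRING, country STRING)
	// rewrites to ^(TOK_TABLEPARTCOLS columnNameTypeConstraint+), one child per
	// partition column.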
	public final HiveParser.tablePartition_return tablePartition() throws RecognitionException {
		HiveParser.tablePartition_return retval = new HiveParser.tablePartition_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_PARTITIONED711=null;
		Token KW_BY712=null;
		Token LPAREN713=null;
		Token COMMA715=null;
		Token RPAREN717=null;
		ParserRuleReturnScope columnNameTypeConstraint714 =null;
		ParserRuleReturnScope columnNameTypeConstraint716 =null;

		ASTNode KW_PARTITIONED711_tree=null;
		ASTNode KW_BY712_tree=null;
		ASTNode LPAREN713_tree=null;
		ASTNode COMMA715_tree=null;
		ASTNode RPAREN717_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleTokenStream stream_KW_BY=new RewriteRuleTokenStream(adaptor,"token KW_BY");
		RewriteRuleTokenStream stream_KW_PARTITIONED=new RewriteRuleTokenStream(adaptor,"token KW_PARTITIONED");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleSubtreeStream stream_columnNameTypeConstraint=new RewriteRuleSubtreeStream(adaptor,"rule columnNameTypeConstraint");

		 pushMsg("table partition specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1979:5: ( KW_PARTITIONED KW_BY LPAREN columnNameTypeConstraint ( COMMA columnNameTypeConstraint )* RPAREN -> ^( TOK_TABLEPARTCOLS ( columnNameTypeConstraint )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1979:7: KW_PARTITIONED KW_BY LPAREN columnNameTypeConstraint ( COMMA columnNameTypeConstraint )* RPAREN
			{
			KW_PARTITIONED711=(Token)match(input,KW_PARTITIONED,FOLLOW_KW_PARTITIONED_in_tablePartition11474); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_PARTITIONED.add(KW_PARTITIONED711);

			KW_BY712=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tablePartition11476); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_BY.add(KW_BY712);

			LPAREN713=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_tablePartition11478); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN713);

			pushFollow(FOLLOW_columnNameTypeConstraint_in_tablePartition11480);
			columnNameTypeConstraint714=columnNameTypeConstraint();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnNameTypeConstraint.add(columnNameTypeConstraint714.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1979:60: ( COMMA columnNameTypeConstraint )*
			loop217:
			while (true) {
				int alt217=2;
				int LA217_0 = input.LA(1);
				if ( (LA217_0==COMMA) ) {
					alt217=1;
				}

				switch (alt217) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1979:61: COMMA columnNameTypeConstraint
					{
					COMMA715=(Token)match(input,COMMA,FOLLOW_COMMA_in_tablePartition11483); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA715);

					pushFollow(FOLLOW_columnNameTypeConstraint_in_tablePartition11485);
					columnNameTypeConstraint716=columnNameTypeConstraint();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_columnNameTypeConstraint.add(columnNameTypeConstraint716.getTree());
					}
					break;

				default :
					break loop217;
				}
			}

			RPAREN717=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_tablePartition11489); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN717);

			// AST REWRITE
			// elements: columnNameTypeConstraint
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1980:5: -> ^( TOK_TABLEPARTCOLS ( columnNameTypeConstraint )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1980:8: ^( TOK_TABLEPARTCOLS ( columnNameTypeConstraint )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLEPARTCOLS, "TOK_TABLEPARTCOLS"), root_1);
				if ( !(stream_columnNameTypeConstraint.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_columnNameTypeConstraint.hasNext() ) {
					adaptor.addChild(root_1, stream_columnNameTypeConstraint.nextTree());
				}
				stream_columnNameTypeConstraint.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup to run before leaving the rule (nothing needed here)
		}
		return retval;
	}
	// $ANTLR end "tablePartition"


	public static class tableBuckets_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tableBuckets"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1983:1: tableBuckets : KW_CLUSTERED KW_BY LPAREN bucketCols= columnNameList RPAREN ( KW_SORTED KW_BY LPAREN sortCols= columnNameOrderList RPAREN )? KW_INTO num= Number KW_BUCKETS -> ^( TOK_ALTERTABLE_BUCKETS $bucketCols ( $sortCols)? $num) ;
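	// Illustrative example (hand-written, not ANTLR output):
	//   CLUSTERED BY (user_id) SORTED BY (ts) INTO 32 BUCKETS
	// rewrites to ^(TOK_ALTERTABLE_BUCKETS $bucketCols $sortCols? $num); the
	// SORTED BY block is optional, the bucket count is not.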
	public final HiveParser.tableBuckets_return tableBuckets() throws RecognitionException {
		HiveParser.tableBuckets_return retval = new HiveParser.tableBuckets_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token num=null;
		Token KW_CLUSTERED718=null;
		Token KW_BY719=null;
		Token LPAREN720=null;
		Token RPAREN721=null;
		Token KW_SORTED722=null;
		Token KW_BY723=null;
		Token LPAREN724=null;
		Token RPAREN725=null;
		Token KW_INTO726=null;
		Token KW_BUCKETS727=null;
		ParserRuleReturnScope bucketCols =null;
		ParserRuleReturnScope sortCols =null;

		ASTNode num_tree=null;
		ASTNode KW_CLUSTERED718_tree=null;
		ASTNode KW_BY719_tree=null;
		ASTNode LPAREN720_tree=null;
		ASTNode RPAREN721_tree=null;
		ASTNode KW_SORTED722_tree=null;
		ASTNode KW_BY723_tree=null;
		ASTNode LPAREN724_tree=null;
		ASTNode RPAREN725_tree=null;
		ASTNode KW_INTO726_tree=null;
		ASTNode KW_BUCKETS727_tree=null;
		RewriteRuleTokenStream stream_KW_BY=new RewriteRuleTokenStream(adaptor,"token KW_BY");
		RewriteRuleTokenStream stream_KW_SORTED=new RewriteRuleTokenStream(adaptor,"token KW_SORTED");
		RewriteRuleTokenStream stream_Number=new RewriteRuleTokenStream(adaptor,"token Number");
		RewriteRuleTokenStream stream_KW_INTO=new RewriteRuleTokenStream(adaptor,"token KW_INTO");
		RewriteRuleTokenStream stream_KW_BUCKETS=new RewriteRuleTokenStream(adaptor,"token KW_BUCKETS");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleTokenStream stream_KW_CLUSTERED=new RewriteRuleTokenStream(adaptor,"token KW_CLUSTERED");
		RewriteRuleSubtreeStream stream_columnNameOrderList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameOrderList");
		RewriteRuleSubtreeStream stream_columnNameList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameList");

		 pushMsg("table buckets specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1986:5: ( KW_CLUSTERED KW_BY LPAREN bucketCols= columnNameList RPAREN ( KW_SORTED KW_BY LPAREN sortCols= columnNameOrderList RPAREN )? KW_INTO num= Number KW_BUCKETS -> ^( TOK_ALTERTABLE_BUCKETS $bucketCols ( $sortCols)? $num) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1987:7: KW_CLUSTERED KW_BY LPAREN bucketCols= columnNameList RPAREN ( KW_SORTED KW_BY LPAREN sortCols= columnNameOrderList RPAREN )? KW_INTO num= Number KW_BUCKETS
			{
			KW_CLUSTERED718=(Token)match(input,KW_CLUSTERED,FOLLOW_KW_CLUSTERED_in_tableBuckets11535); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_CLUSTERED.add(KW_CLUSTERED718);

			KW_BY719=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableBuckets11537); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_BY.add(KW_BY719);

			LPAREN720=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_tableBuckets11539); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN720);

			pushFollow(FOLLOW_columnNameList_in_tableBuckets11543);
			bucketCols=columnNameList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnNameList.add(bucketCols.getTree());
			RPAREN721=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_tableBuckets11545); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN721);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1987:66: ( KW_SORTED KW_BY LPAREN sortCols= columnNameOrderList RPAREN )?
			int alt218=2;
			int LA218_0 = input.LA(1);
			if ( (LA218_0==KW_SORTED) ) {
				alt218=1;
			}
			switch (alt218) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1987:67: KW_SORTED KW_BY LPAREN sortCols= columnNameOrderList RPAREN
					{
					KW_SORTED722=(Token)match(input,KW_SORTED,FOLLOW_KW_SORTED_in_tableBuckets11548); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SORTED.add(KW_SORTED722);

					KW_BY723=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableBuckets11550); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_BY.add(KW_BY723);

					LPAREN724=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_tableBuckets11552); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN724);

					pushFollow(FOLLOW_columnNameOrderList_in_tableBuckets11556);
					sortCols=columnNameOrderList();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_columnNameOrderList.add(sortCols.getTree());
					RPAREN725=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_tableBuckets11558); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN725);

					}
					break;

			}

			KW_INTO726=(Token)match(input,KW_INTO,FOLLOW_KW_INTO_in_tableBuckets11562); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_INTO.add(KW_INTO726);

			num=(Token)match(input,Number,FOLLOW_Number_in_tableBuckets11566); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_Number.add(num);

			KW_BUCKETS727=(Token)match(input,KW_BUCKETS,FOLLOW_KW_BUCKETS_in_tableBuckets11568); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_BUCKETS.add(KW_BUCKETS727);

			// AST REWRITE
			// elements: bucketCols, num, sortCols
			// token labels: num
			// rule labels: bucketCols, sortCols, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_num=new RewriteRuleTokenStream(adaptor,"token num",num);
			RewriteRuleSubtreeStream stream_bucketCols=new RewriteRuleSubtreeStream(adaptor,"rule bucketCols",bucketCols!=null?bucketCols.getTree():null);
			RewriteRuleSubtreeStream stream_sortCols=new RewriteRuleSubtreeStream(adaptor,"rule sortCols",sortCols!=null?sortCols.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1988:5: -> ^( TOK_ALTERTABLE_BUCKETS $bucketCols ( $sortCols)? $num)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1988:8: ^( TOK_ALTERTABLE_BUCKETS $bucketCols ( $sortCols)? $num)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ALTERTABLE_BUCKETS, "TOK_ALTERTABLE_BUCKETS"), root_1);
				adaptor.addChild(root_1, stream_bucketCols.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1988:46: ( $sortCols)?
				if ( stream_sortCols.hasNext() ) {
					adaptor.addChild(root_1, stream_sortCols.nextTree());
				}
				stream_sortCols.reset();

				adaptor.addChild(root_1, stream_num.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup to run before leaving the rule (nothing needed here)
		}
		return retval;
	}
	// $ANTLR end "tableBuckets"


	public static class tableSkewed_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tableSkewed"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1991:1: tableSkewed : KW_SKEWED KW_BY LPAREN skewedCols= columnNameList RPAREN KW_ON LPAREN (skewedValues= skewedValueElement ) RPAREN ( ( storedAsDirs )=> storedAsDirs )? -> ^( TOK_TABLESKEWED $skewedCols $skewedValues ( storedAsDirs )? ) ;
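	// Illustrative example (hand-written, not ANTLR output):
	//   SKEWED BY (key) ON (1, 5, 6) STORED AS DIRECTORIES
	// rewrites to ^(TOK_TABLESKEWED $skewedCols $skewedValues storedAsDirs?);
	// note the syntactic predicate gating the optional STORED AS DIRECTORIES tail.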
	public final HiveParser.tableSkewed_return tableSkewed() throws RecognitionException {
		HiveParser.tableSkewed_return retval = new HiveParser.tableSkewed_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SKEWED728=null;
		Token KW_BY729=null;
		Token LPAREN730=null;
		Token RPAREN731=null;
		Token KW_ON732=null;
		Token LPAREN733=null;
		Token RPAREN734=null;
		ParserRuleReturnScope skewedCols =null;
		ParserRuleReturnScope skewedValues =null;
		ParserRuleReturnScope storedAsDirs735 =null;

		ASTNode KW_SKEWED728_tree=null;
		ASTNode KW_BY729_tree=null;
		ASTNode LPAREN730_tree=null;
		ASTNode RPAREN731_tree=null;
		ASTNode KW_ON732_tree=null;
		ASTNode LPAREN733_tree=null;
		ASTNode RPAREN734_tree=null;
		RewriteRuleTokenStream stream_KW_BY=new RewriteRuleTokenStream(adaptor,"token KW_BY");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_KW_SKEWED=new RewriteRuleTokenStream(adaptor,"token KW_SKEWED");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleTokenStream stream_KW_ON=new RewriteRuleTokenStream(adaptor,"token KW_ON");
		RewriteRuleSubtreeStream stream_skewedValueElement=new RewriteRuleSubtreeStream(adaptor,"rule skewedValueElement");
		RewriteRuleSubtreeStream stream_columnNameList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameList");
		RewriteRuleSubtreeStream stream_storedAsDirs=new RewriteRuleSubtreeStream(adaptor,"rule storedAsDirs");

		 pushMsg("table skewed specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1994:5: ( KW_SKEWED KW_BY LPAREN skewedCols= columnNameList RPAREN KW_ON LPAREN (skewedValues= skewedValueElement ) RPAREN ( ( storedAsDirs )=> storedAsDirs )? -> ^( TOK_TABLESKEWED $skewedCols $skewedValues ( storedAsDirs )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1995:6: KW_SKEWED KW_BY LPAREN skewedCols= columnNameList RPAREN KW_ON LPAREN (skewedValues= skewedValueElement ) RPAREN ( ( storedAsDirs )=> storedAsDirs )?
			{
			KW_SKEWED728=(Token)match(input,KW_SKEWED,FOLLOW_KW_SKEWED_in_tableSkewed11620); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SKEWED.add(KW_SKEWED728);

			KW_BY729=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableSkewed11622); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_BY.add(KW_BY729);

			LPAREN730=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_tableSkewed11624); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN730);

			pushFollow(FOLLOW_columnNameList_in_tableSkewed11628);
			skewedCols=columnNameList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnNameList.add(skewedCols.getTree());
			RPAREN731=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_tableSkewed11630); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN731);

			KW_ON732=(Token)match(input,KW_ON,FOLLOW_KW_ON_in_tableSkewed11632); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ON.add(KW_ON732);

			LPAREN733=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_tableSkewed11634); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN733);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1995:75: (skewedValues= skewedValueElement )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1995:76: skewedValues= skewedValueElement
			{
			pushFollow(FOLLOW_skewedValueElement_in_tableSkewed11639);
			skewedValues=skewedValueElement();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_skewedValueElement.add(skewedValues.getTree());
			}

			RPAREN734=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_tableSkewed11642); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN734);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:1995:116: ( ( storedAsDirs )=> storedAsDirs )?
			int alt219=2;
			int LA219_0 = input.LA(1);
			if ( (LA219_0==KW_STORED) ) {
				int LA219_1 = input.LA(2);
				if ( (LA219_1==KW_AS) ) {
					int LA219_7 = input.LA(3);
					if ( (LA219_7==KW_DIRECTORIES) ) {
						int LA219_9 = input.LA(4);
						if ( (synpred17_HiveParser()) ) {
							alt219=1;
						}
					}
				}
			}
			switch (alt219) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:1995:117: ( storedAsDirs )=> storedAsDirs
					{
					pushFollow(FOLLOW_storedAsDirs_in_tableSkewed11651);
					storedAsDirs735=storedAsDirs();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_storedAsDirs.add(storedAsDirs735.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: skewedCols, skewedValues, storedAsDirs
			// token labels: 
			// rule labels: skewedCols, skewedValues, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_skewedCols=new RewriteRuleSubtreeStream(adaptor,"rule skewedCols",skewedCols!=null?skewedCols.getTree():null);
			RewriteRuleSubtreeStream stream_skewedValues=new RewriteRuleSubtreeStream(adaptor,"rule skewedValues",skewedValues!=null?skewedValues.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 1996:5: -> ^( TOK_TABLESKEWED $skewedCols $skewedValues ( storedAsDirs )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1996:8: ^( TOK_TABLESKEWED $skewedCols $skewedValues ( storedAsDirs )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLESKEWED, "TOK_TABLESKEWED"), root_1);
				adaptor.addChild(root_1, stream_skewedCols.nextTree());
				adaptor.addChild(root_1, stream_skewedValues.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:1996:52: ( storedAsDirs )?
				if ( stream_storedAsDirs.hasNext() ) {
					adaptor.addChild(root_1, stream_storedAsDirs.nextTree());
				}
				stream_storedAsDirs.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup to run before leaving the rule (nothing needed here)
		}
		return retval;
	}
	// $ANTLR end "tableSkewed"


	public static class rowFormat_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "rowFormat"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:1999:1: rowFormat : ( rowFormatSerde -> ^( TOK_SERDE rowFormatSerde ) | rowFormatDelimited -> ^( TOK_SERDE rowFormatDelimited ) | -> ^( TOK_SERDE ) );
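	// Illustrative note (hand-written, not ANTLR output): all three alternatives
	// produce a TOK_SERDE root, e.g.
	//   ROW FORMAT SERDE '...'     -> ^(TOK_SERDE rowFormatSerde)
	//   ROW FORMAT DELIMITED ...   -> ^(TOK_SERDE rowFormatDelimited)
	//   (no ROW FORMAT clause)     -> ^(TOK_SERDE)
	// so downstream code sees a uniform serde subtree either way.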
	public final HiveParser.rowFormat_return rowFormat() throws RecognitionException {
		HiveParser.rowFormat_return retval = new HiveParser.rowFormat_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope rowFormatSerde736 =null;
		ParserRuleReturnScope rowFormatDelimited737 =null;

		RewriteRuleSubtreeStream stream_rowFormatSerde=new RewriteRuleSubtreeStream(adaptor,"rule rowFormatSerde");
		RewriteRuleSubtreeStream stream_rowFormatDelimited=new RewriteRuleSubtreeStream(adaptor,"rule rowFormatDelimited");

		 pushMsg("serde specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2002:5: ( rowFormatSerde -> ^( TOK_SERDE rowFormatSerde ) | rowFormatDelimited -> ^( TOK_SERDE rowFormatDelimited ) | -> ^( TOK_SERDE ) )
			int alt220=3;
			int LA220_0 = input.LA(1);
			if ( (LA220_0==KW_ROW) ) {
				int LA220_1 = input.LA(2);
				if ( (LA220_1==KW_FORMAT) ) {
					int LA220_27 = input.LA(3);
					if ( (LA220_27==KW_SERDE) ) {
						alt220=1;
					}
					else if ( (LA220_27==KW_DELIMITED) ) {
						alt220=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						int nvaeMark = input.mark();
						try {
							for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
								input.consume();
							}
							NoViableAltException nvae =
								new NoViableAltException("", 220, 27, input);
							throw nvae;
						} finally {
							input.rewind(nvaeMark);
						}
					}

				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 220, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

			}
			else if ( (LA220_0==EOF||LA220_0==COMMA||LA220_0==KW_CLUSTER||LA220_0==KW_DISTRIBUTE||LA220_0==KW_EXCEPT||LA220_0==KW_FROM||LA220_0==KW_GROUP||LA220_0==KW_HAVING||LA220_0==KW_INSERT||LA220_0==KW_INTERSECT||LA220_0==KW_LATERAL||LA220_0==KW_LIMIT||LA220_0==KW_MAP||LA220_0==KW_MINUS||LA220_0==KW_ORDER||(LA220_0 >= KW_RECORDREADER && LA220_0 <= KW_REDUCE)||LA220_0==KW_SELECT||LA220_0==KW_SORT||LA220_0==KW_UNION||LA220_0==KW_USING||LA220_0==KW_WHERE||LA220_0==KW_WINDOW||LA220_0==RPAREN) ) {
				alt220=3;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 220, 0, input);
				throw nvae;
			}

			switch (alt220) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2002:7: rowFormatSerde
					{
					pushFollow(FOLLOW_rowFormatSerde_in_rowFormat11699);
					rowFormatSerde736=rowFormatSerde();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_rowFormatSerde.add(rowFormatSerde736.getTree());
					// AST REWRITE
					// elements: rowFormatSerde
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2002:22: -> ^( TOK_SERDE rowFormatSerde )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2002:25: ^( TOK_SERDE rowFormatSerde )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SERDE, "TOK_SERDE"), root_1);
						adaptor.addChild(root_1, stream_rowFormatSerde.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2003:7: rowFormatDelimited
					{
					pushFollow(FOLLOW_rowFormatDelimited_in_rowFormat11715);
					rowFormatDelimited737=rowFormatDelimited();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_rowFormatDelimited.add(rowFormatDelimited737.getTree());
					// AST REWRITE
					// elements: rowFormatDelimited
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2003:26: -> ^( TOK_SERDE rowFormatDelimited )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2003:29: ^( TOK_SERDE rowFormatDelimited )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SERDE, "TOK_SERDE"), root_1);
						adaptor.addChild(root_1, stream_rowFormatDelimited.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2004:9: 
					{
					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2004:9: -> ^( TOK_SERDE )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2004:12: ^( TOK_SERDE )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SERDE, "TOK_SERDE"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup to run before leaving the rule (nothing needed here)
		}
		return retval;
	}
	// $ANTLR end "rowFormat"


	public static class recordReader_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "recordReader"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2007:1: recordReader : ( KW_RECORDREADER StringLiteral -> ^( TOK_RECORDREADER StringLiteral ) | -> ^( TOK_RECORDREADER ) );
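	// Illustrative note (hand-written, not ANTLR output): used by TRANSFORM
	// clauses, e.g.
	//   RECORDREADER 'org.apache.hadoop.hive.ql.exec.TextRecordReader'
	// rewrites to ^(TOK_RECORDREADER StringLiteral); when absent, the rule yields
	// the empty ^(TOK_RECORDREADER) marker instead.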
	public final HiveParser.recordReader_return recordReader() throws RecognitionException {
		HiveParser.recordReader_return retval = new HiveParser.recordReader_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_RECORDREADER738=null;
		Token StringLiteral739=null;

		ASTNode KW_RECORDREADER738_tree=null;
		ASTNode StringLiteral739_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_RECORDREADER=new RewriteRuleTokenStream(adaptor,"token KW_RECORDREADER");

		 pushMsg("record reader specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2010:5: ( KW_RECORDREADER StringLiteral -> ^( TOK_RECORDREADER StringLiteral ) | -> ^( TOK_RECORDREADER ) )
			int alt221=2;
			int LA221_0 = input.LA(1);
			if ( (LA221_0==KW_RECORDREADER) ) {
				alt221=1;
			}
			else if ( (LA221_0==EOF||LA221_0==COMMA||LA221_0==KW_CLUSTER||LA221_0==KW_DISTRIBUTE||LA221_0==KW_EXCEPT||LA221_0==KW_FROM||LA221_0==KW_GROUP||LA221_0==KW_HAVING||LA221_0==KW_INSERT||LA221_0==KW_INTERSECT||LA221_0==KW_LATERAL||LA221_0==KW_LIMIT||LA221_0==KW_MAP||LA221_0==KW_MINUS||LA221_0==KW_ORDER||LA221_0==KW_REDUCE||LA221_0==KW_SELECT||LA221_0==KW_SORT||LA221_0==KW_UNION||LA221_0==KW_WHERE||LA221_0==KW_WINDOW||LA221_0==RPAREN) ) {
				alt221=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 221, 0, input);
				throw nvae;
			}

			switch (alt221) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2010:7: KW_RECORDREADER StringLiteral
					{
					KW_RECORDREADER738=(Token)match(input,KW_RECORDREADER,FOLLOW_KW_RECORDREADER_in_recordReader11764); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_RECORDREADER.add(KW_RECORDREADER738);

					StringLiteral739=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_recordReader11766); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(StringLiteral739);

					// AST REWRITE
					// elements: StringLiteral
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2010:37: -> ^( TOK_RECORDREADER StringLiteral )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2010:40: ^( TOK_RECORDREADER StringLiteral )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_RECORDREADER, "TOK_RECORDREADER"), root_1);
						adaptor.addChild(root_1, stream_StringLiteral.nextNode());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2011:9: 
					{
					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2011:9: -> ^( TOK_RECORDREADER )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2011:12: ^( TOK_RECORDREADER )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_RECORDREADER, "TOK_RECORDREADER"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup to run before leaving the rule (nothing needed here)
		}
		return retval;
	}
	// $ANTLR end "recordReader"


	public static class recordWriter_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "recordWriter"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2014:1: recordWriter : ( KW_RECORDWRITER StringLiteral -> ^( TOK_RECORDWRITER StringLiteral ) | -> ^( TOK_RECORDWRITER ) );
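	// Illustrative note (hand-written, not ANTLR output): mirrors recordReader for
	// the writer side, e.g.
	//   RECORDWRITER 'org.apache.hadoop.hive.ql.exec.TextRecordWriter'
	// The empty alternative is taken when the next token is KW_USING, i.e. when the
	// TRANSFORM clause omits an explicit writer.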
	public final HiveParser.recordWriter_return recordWriter() throws RecognitionException {
		HiveParser.recordWriter_return retval = new HiveParser.recordWriter_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_RECORDWRITER740=null;
		Token StringLiteral741=null;

		ASTNode KW_RECORDWRITER740_tree=null;
		ASTNode StringLiteral741_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_RECORDWRITER=new RewriteRuleTokenStream(adaptor,"token KW_RECORDWRITER");

		 pushMsg("record writer specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2017:5: ( KW_RECORDWRITER StringLiteral -> ^( TOK_RECORDWRITER StringLiteral ) | -> ^( TOK_RECORDWRITER ) )
			int alt222=2;
			int LA222_0 = input.LA(1);
			if ( (LA222_0==KW_RECORDWRITER) ) {
				alt222=1;
			}
			else if ( (LA222_0==KW_USING) ) {
				alt222=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 222, 0, input);
				throw nvae;
			}

			switch (alt222) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2017:7: KW_RECORDWRITER StringLiteral
					{
					KW_RECORDWRITER740=(Token)match(input,KW_RECORDWRITER,FOLLOW_KW_RECORDWRITER_in_recordWriter11815); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_RECORDWRITER.add(KW_RECORDWRITER740);

					StringLiteral741=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_recordWriter11817); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(StringLiteral741);

					// AST REWRITE
					// elements: StringLiteral
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2017:37: -> ^( TOK_RECORDWRITER StringLiteral )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2017:40: ^( TOK_RECORDWRITER StringLiteral )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_RECORDWRITER, "TOK_RECORDWRITER"), root_1);
						adaptor.addChild(root_1, stream_StringLiteral.nextNode());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2018:9: 
					{
					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2018:9: -> ^( TOK_RECORDWRITER )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2018:12: ^( TOK_RECORDWRITER )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_RECORDWRITER, "TOK_RECORDWRITER"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "recordWriter"


	public static class rowFormatSerde_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "rowFormatSerde"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2021:1: rowFormatSerde : KW_ROW KW_FORMAT KW_SERDE name= StringLiteral ( KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties )? -> ^( TOK_SERDENAME $name ( $serdeprops)? ) ;
	public final HiveParser.rowFormatSerde_return rowFormatSerde() throws RecognitionException {
		HiveParser.rowFormatSerde_return retval = new HiveParser.rowFormatSerde_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token name=null;
		Token KW_ROW742=null;
		Token KW_FORMAT743=null;
		Token KW_SERDE744=null;
		Token KW_WITH745=null;
		Token KW_SERDEPROPERTIES746=null;
		ParserRuleReturnScope serdeprops =null;

		ASTNode name_tree=null;
		ASTNode KW_ROW742_tree=null;
		ASTNode KW_FORMAT743_tree=null;
		ASTNode KW_SERDE744_tree=null;
		ASTNode KW_WITH745_tree=null;
		ASTNode KW_SERDEPROPERTIES746_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
		RewriteRuleTokenStream stream_KW_ROW=new RewriteRuleTokenStream(adaptor,"token KW_ROW");
		RewriteRuleTokenStream stream_KW_SERDEPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_SERDEPROPERTIES");
		RewriteRuleTokenStream stream_KW_SERDE=new RewriteRuleTokenStream(adaptor,"token KW_SERDE");
		RewriteRuleTokenStream stream_KW_FORMAT=new RewriteRuleTokenStream(adaptor,"token KW_FORMAT");
		RewriteRuleSubtreeStream stream_tableProperties=new RewriteRuleSubtreeStream(adaptor,"rule tableProperties");

		 pushMsg("serde format specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2024:5: ( KW_ROW KW_FORMAT KW_SERDE name= StringLiteral ( KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties )? -> ^( TOK_SERDENAME $name ( $serdeprops)? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2024:7: KW_ROW KW_FORMAT KW_SERDE name= StringLiteral ( KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties )?
			{
			KW_ROW742=(Token)match(input,KW_ROW,FOLLOW_KW_ROW_in_rowFormatSerde11866); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ROW.add(KW_ROW742);

			KW_FORMAT743=(Token)match(input,KW_FORMAT,FOLLOW_KW_FORMAT_in_rowFormatSerde11868); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FORMAT.add(KW_FORMAT743);

			KW_SERDE744=(Token)match(input,KW_SERDE,FOLLOW_KW_SERDE_in_rowFormatSerde11870); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SERDE.add(KW_SERDE744);

			name=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_rowFormatSerde11874); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(name);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2024:52: ( KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties )?
			int alt223=2;
			int LA223_0 = input.LA(1);
			if ( (LA223_0==KW_WITH) ) {
				alt223=1;
			}
			switch (alt223) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2024:53: KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties
					{
					KW_WITH745=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_rowFormatSerde11877); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_WITH.add(KW_WITH745);

					KW_SERDEPROPERTIES746=(Token)match(input,KW_SERDEPROPERTIES,FOLLOW_KW_SERDEPROPERTIES_in_rowFormatSerde11879); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SERDEPROPERTIES.add(KW_SERDEPROPERTIES746);

					pushFollow(FOLLOW_tableProperties_in_rowFormatSerde11883);
					serdeprops=tableProperties();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableProperties.add(serdeprops.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: serdeprops, name
			// token labels: name
			// rule labels: serdeprops, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_name=new RewriteRuleTokenStream(adaptor,"token name",name);
			RewriteRuleSubtreeStream stream_serdeprops=new RewriteRuleSubtreeStream(adaptor,"rule serdeprops",serdeprops!=null?serdeprops.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2025:5: -> ^( TOK_SERDENAME $name ( $serdeprops)? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2025:8: ^( TOK_SERDENAME $name ( $serdeprops)? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SERDENAME, "TOK_SERDENAME"), root_1);
				adaptor.addChild(root_1, stream_name.nextNode());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2025:31: ( $serdeprops)?
				if ( stream_serdeprops.hasNext() ) {
					adaptor.addChild(root_1, stream_serdeprops.nextTree());
				}
				stream_serdeprops.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "rowFormatSerde"


	public static class rowFormatDelimited_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "rowFormatDelimited"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2028:1: rowFormatDelimited : KW_ROW KW_FORMAT KW_DELIMITED ( tableRowFormatFieldIdentifier )? ( tableRowFormatCollItemsIdentifier )? ( tableRowFormatMapKeysIdentifier )? ( tableRowFormatLinesIdentifier )? ( tableRowNullFormat )? -> ^( TOK_SERDEPROPS ( tableRowFormatFieldIdentifier )? ( tableRowFormatCollItemsIdentifier )? ( tableRowFormatMapKeysIdentifier )? ( tableRowFormatLinesIdentifier )? ( tableRowNullFormat )? ) ;
	public final HiveParser.rowFormatDelimited_return rowFormatDelimited() throws RecognitionException {
		HiveParser.rowFormatDelimited_return retval = new HiveParser.rowFormatDelimited_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ROW747=null;
		Token KW_FORMAT748=null;
		Token KW_DELIMITED749=null;
		ParserRuleReturnScope tableRowFormatFieldIdentifier750 =null;
		ParserRuleReturnScope tableRowFormatCollItemsIdentifier751 =null;
		ParserRuleReturnScope tableRowFormatMapKeysIdentifier752 =null;
		ParserRuleReturnScope tableRowFormatLinesIdentifier753 =null;
		ParserRuleReturnScope tableRowNullFormat754 =null;

		ASTNode KW_ROW747_tree=null;
		ASTNode KW_FORMAT748_tree=null;
		ASTNode KW_DELIMITED749_tree=null;
		RewriteRuleTokenStream stream_KW_ROW=new RewriteRuleTokenStream(adaptor,"token KW_ROW");
		RewriteRuleTokenStream stream_KW_DELIMITED=new RewriteRuleTokenStream(adaptor,"token KW_DELIMITED");
		RewriteRuleTokenStream stream_KW_FORMAT=new RewriteRuleTokenStream(adaptor,"token KW_FORMAT");
		RewriteRuleSubtreeStream stream_tableRowNullFormat=new RewriteRuleSubtreeStream(adaptor,"rule tableRowNullFormat");
		RewriteRuleSubtreeStream stream_tableRowFormatFieldIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule tableRowFormatFieldIdentifier");
		RewriteRuleSubtreeStream stream_tableRowFormatCollItemsIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule tableRowFormatCollItemsIdentifier");
		RewriteRuleSubtreeStream stream_tableRowFormatMapKeysIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule tableRowFormatMapKeysIdentifier");
		RewriteRuleSubtreeStream stream_tableRowFormatLinesIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule tableRowFormatLinesIdentifier");

		 pushMsg("serde properties specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2031:5: ( KW_ROW KW_FORMAT KW_DELIMITED ( tableRowFormatFieldIdentifier )? ( tableRowFormatCollItemsIdentifier )? ( tableRowFormatMapKeysIdentifier )? ( tableRowFormatLinesIdentifier )? ( tableRowNullFormat )? -> ^( TOK_SERDEPROPS ( tableRowFormatFieldIdentifier )? ( tableRowFormatCollItemsIdentifier )? ( tableRowFormatMapKeysIdentifier )? ( tableRowFormatLinesIdentifier )? ( tableRowNullFormat )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2032:7: KW_ROW KW_FORMAT KW_DELIMITED ( tableRowFormatFieldIdentifier )? ( tableRowFormatCollItemsIdentifier )? ( tableRowFormatMapKeysIdentifier )? ( tableRowFormatLinesIdentifier )? ( tableRowNullFormat )?
			{
			KW_ROW747=(Token)match(input,KW_ROW,FOLLOW_KW_ROW_in_rowFormatDelimited11935); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ROW.add(KW_ROW747);

			KW_FORMAT748=(Token)match(input,KW_FORMAT,FOLLOW_KW_FORMAT_in_rowFormatDelimited11937); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FORMAT.add(KW_FORMAT748);

			KW_DELIMITED749=(Token)match(input,KW_DELIMITED,FOLLOW_KW_DELIMITED_in_rowFormatDelimited11939); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DELIMITED.add(KW_DELIMITED749);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2032:37: ( tableRowFormatFieldIdentifier )?
			int alt224=2;
			int LA224_0 = input.LA(1);
			if ( (LA224_0==KW_FIELDS) ) {
				alt224=1;
			}
			switch (alt224) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2032:37: tableRowFormatFieldIdentifier
					{
					pushFollow(FOLLOW_tableRowFormatFieldIdentifier_in_rowFormatDelimited11941);
					tableRowFormatFieldIdentifier750=tableRowFormatFieldIdentifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableRowFormatFieldIdentifier.add(tableRowFormatFieldIdentifier750.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2032:68: ( tableRowFormatCollItemsIdentifier )?
			int alt225=2;
			int LA225_0 = input.LA(1);
			if ( (LA225_0==KW_COLLECTION) ) {
				alt225=1;
			}
			switch (alt225) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2032:68: tableRowFormatCollItemsIdentifier
					{
					pushFollow(FOLLOW_tableRowFormatCollItemsIdentifier_in_rowFormatDelimited11944);
					tableRowFormatCollItemsIdentifier751=tableRowFormatCollItemsIdentifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableRowFormatCollItemsIdentifier.add(tableRowFormatCollItemsIdentifier751.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2032:103: ( tableRowFormatMapKeysIdentifier )?
			int alt226=2;
			alt226 = dfa226.predict(input);
			switch (alt226) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2032:103: tableRowFormatMapKeysIdentifier
					{
					pushFollow(FOLLOW_tableRowFormatMapKeysIdentifier_in_rowFormatDelimited11947);
					tableRowFormatMapKeysIdentifier752=tableRowFormatMapKeysIdentifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableRowFormatMapKeysIdentifier.add(tableRowFormatMapKeysIdentifier752.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2032:136: ( tableRowFormatLinesIdentifier )?
			int alt227=2;
			int LA227_0 = input.LA(1);
			if ( (LA227_0==KW_LINES) ) {
				alt227=1;
			}
			switch (alt227) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2032:136: tableRowFormatLinesIdentifier
					{
					pushFollow(FOLLOW_tableRowFormatLinesIdentifier_in_rowFormatDelimited11950);
					tableRowFormatLinesIdentifier753=tableRowFormatLinesIdentifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableRowFormatLinesIdentifier.add(tableRowFormatLinesIdentifier753.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2032:167: ( tableRowNullFormat )?
			int alt228=2;
			int LA228_0 = input.LA(1);
			if ( (LA228_0==KW_NULL) ) {
				alt228=1;
			}
			switch (alt228) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2032:167: tableRowNullFormat
					{
					pushFollow(FOLLOW_tableRowNullFormat_in_rowFormatDelimited11953);
					tableRowNullFormat754=tableRowNullFormat();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableRowNullFormat.add(tableRowNullFormat754.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: tableRowFormatCollItemsIdentifier, tableRowFormatFieldIdentifier, tableRowNullFormat, tableRowFormatMapKeysIdentifier, tableRowFormatLinesIdentifier
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2033:5: -> ^( TOK_SERDEPROPS ( tableRowFormatFieldIdentifier )? ( tableRowFormatCollItemsIdentifier )? ( tableRowFormatMapKeysIdentifier )? ( tableRowFormatLinesIdentifier )? ( tableRowNullFormat )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2033:8: ^( TOK_SERDEPROPS ( tableRowFormatFieldIdentifier )? ( tableRowFormatCollItemsIdentifier )? ( tableRowFormatMapKeysIdentifier )? ( tableRowFormatLinesIdentifier )? ( tableRowNullFormat )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SERDEPROPS, "TOK_SERDEPROPS"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2033:25: ( tableRowFormatFieldIdentifier )?
				if ( stream_tableRowFormatFieldIdentifier.hasNext() ) {
					adaptor.addChild(root_1, stream_tableRowFormatFieldIdentifier.nextTree());
				}
				stream_tableRowFormatFieldIdentifier.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2033:56: ( tableRowFormatCollItemsIdentifier )?
				if ( stream_tableRowFormatCollItemsIdentifier.hasNext() ) {
					adaptor.addChild(root_1, stream_tableRowFormatCollItemsIdentifier.nextTree());
				}
				stream_tableRowFormatCollItemsIdentifier.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2033:91: ( tableRowFormatMapKeysIdentifier )?
				if ( stream_tableRowFormatMapKeysIdentifier.hasNext() ) {
					adaptor.addChild(root_1, stream_tableRowFormatMapKeysIdentifier.nextTree());
				}
				stream_tableRowFormatMapKeysIdentifier.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2033:124: ( tableRowFormatLinesIdentifier )?
				if ( stream_tableRowFormatLinesIdentifier.hasNext() ) {
					adaptor.addChild(root_1, stream_tableRowFormatLinesIdentifier.nextTree());
				}
				stream_tableRowFormatLinesIdentifier.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2033:155: ( tableRowNullFormat )?
				if ( stream_tableRowNullFormat.hasNext() ) {
					adaptor.addChild(root_1, stream_tableRowNullFormat.nextTree());
				}
				stream_tableRowNullFormat.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "rowFormatDelimited"


	public static class tableRowFormat_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tableRowFormat"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2036:1: tableRowFormat : ( rowFormatDelimited -> ^( TOK_TABLEROWFORMAT rowFormatDelimited ) | rowFormatSerde -> ^( TOK_TABLESERIALIZER rowFormatSerde ) );
	public final HiveParser.tableRowFormat_return tableRowFormat() throws RecognitionException {
		HiveParser.tableRowFormat_return retval = new HiveParser.tableRowFormat_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope rowFormatDelimited755 =null;
		ParserRuleReturnScope rowFormatSerde756 =null;

		RewriteRuleSubtreeStream stream_rowFormatSerde=new RewriteRuleSubtreeStream(adaptor,"rule rowFormatSerde");
		RewriteRuleSubtreeStream stream_rowFormatDelimited=new RewriteRuleSubtreeStream(adaptor,"rule rowFormatDelimited");

		 pushMsg("table row format specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2039:5: ( rowFormatDelimited -> ^( TOK_TABLEROWFORMAT rowFormatDelimited ) | rowFormatSerde -> ^( TOK_TABLESERIALIZER rowFormatSerde ) )
			int alt229=2;
			int LA229_0 = input.LA(1);
			if ( (LA229_0==KW_ROW) ) {
				int LA229_1 = input.LA(2);
				if ( (LA229_1==KW_FORMAT) ) {
					int LA229_2 = input.LA(3);
					if ( (LA229_2==KW_DELIMITED) ) {
						alt229=1;
					}
					else if ( (LA229_2==KW_SERDE) ) {
						alt229=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						int nvaeMark = input.mark();
						try {
							for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
								input.consume();
							}
							NoViableAltException nvae =
								new NoViableAltException("", 229, 2, input);
							throw nvae;
						} finally {
							input.rewind(nvaeMark);
						}
					}

				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 229, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 229, 0, input);
				throw nvae;
			}

			switch (alt229) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2040:7: rowFormatDelimited
					{
					pushFollow(FOLLOW_rowFormatDelimited_in_tableRowFormat12012);
					rowFormatDelimited755=rowFormatDelimited();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_rowFormatDelimited.add(rowFormatDelimited755.getTree());
					// AST REWRITE
					// elements: rowFormatDelimited
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2041:5: -> ^( TOK_TABLEROWFORMAT rowFormatDelimited )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2041:8: ^( TOK_TABLEROWFORMAT rowFormatDelimited )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLEROWFORMAT, "TOK_TABLEROWFORMAT"), root_1);
						adaptor.addChild(root_1, stream_rowFormatDelimited.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2042:7: rowFormatSerde
					{
					pushFollow(FOLLOW_rowFormatSerde_in_tableRowFormat12032);
					rowFormatSerde756=rowFormatSerde();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_rowFormatSerde.add(rowFormatSerde756.getTree());
					// AST REWRITE
					// elements: rowFormatSerde
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2043:5: -> ^( TOK_TABLESERIALIZER rowFormatSerde )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2043:8: ^( TOK_TABLESERIALIZER rowFormatSerde )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLESERIALIZER, "TOK_TABLESERIALIZER"), root_1);
						adaptor.addChild(root_1, stream_rowFormatSerde.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "tableRowFormat"


	public static class tablePropertiesPrefixed_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tablePropertiesPrefixed"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2046:1: tablePropertiesPrefixed : KW_TBLPROPERTIES ! tableProperties ;
	public final HiveParser.tablePropertiesPrefixed_return tablePropertiesPrefixed() throws RecognitionException {
		HiveParser.tablePropertiesPrefixed_return retval = new HiveParser.tablePropertiesPrefixed_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_TBLPROPERTIES757=null;
		ParserRuleReturnScope tableProperties758 =null;

		ASTNode KW_TBLPROPERTIES757_tree=null;

		 pushMsg("table properties with prefix", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2049:5: ( KW_TBLPROPERTIES ! tableProperties )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2050:9: KW_TBLPROPERTIES ! tableProperties
			{
			root_0 = (ASTNode)adaptor.nil();


			KW_TBLPROPERTIES757=(Token)match(input,KW_TBLPROPERTIES,FOLLOW_KW_TBLPROPERTIES_in_tablePropertiesPrefixed12079); if (state.failed) return retval;
			pushFollow(FOLLOW_tableProperties_in_tablePropertiesPrefixed12082);
			tableProperties758=tableProperties();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) adaptor.addChild(root_0, tableProperties758.getTree());

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "tablePropertiesPrefixed"


	public static class tableProperties_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tableProperties"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2053:1: tableProperties : LPAREN tablePropertiesList RPAREN -> ^( TOK_TABLEPROPERTIES tablePropertiesList ) ;
	public final HiveParser.tableProperties_return tableProperties() throws RecognitionException {
		HiveParser.tableProperties_return retval = new HiveParser.tableProperties_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token LPAREN759=null;
		Token RPAREN761=null;
		ParserRuleReturnScope tablePropertiesList760 =null;

		ASTNode LPAREN759_tree=null;
		ASTNode RPAREN761_tree=null;
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleSubtreeStream stream_tablePropertiesList=new RewriteRuleSubtreeStream(adaptor,"rule tablePropertiesList");

		 pushMsg("table properties", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2056:5: ( LPAREN tablePropertiesList RPAREN -> ^( TOK_TABLEPROPERTIES tablePropertiesList ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2057:7: LPAREN tablePropertiesList RPAREN
			{
			LPAREN759=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_tableProperties12115); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN759);

			pushFollow(FOLLOW_tablePropertiesList_in_tableProperties12117);
			tablePropertiesList760=tablePropertiesList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tablePropertiesList.add(tablePropertiesList760.getTree());
			RPAREN761=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_tableProperties12119); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN761);

			// AST REWRITE
			// elements: tablePropertiesList
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2057:41: -> ^( TOK_TABLEPROPERTIES tablePropertiesList )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2057:44: ^( TOK_TABLEPROPERTIES tablePropertiesList )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLEPROPERTIES, "TOK_TABLEPROPERTIES"), root_1);
				adaptor.addChild(root_1, stream_tablePropertiesList.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "tableProperties"


	public static class tablePropertiesList_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tablePropertiesList"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2060:1: tablePropertiesList : ( keyValueProperty ( COMMA keyValueProperty )* -> ^( TOK_TABLEPROPLIST ( keyValueProperty )+ ) | keyProperty ( COMMA keyProperty )* -> ^( TOK_TABLEPROPLIST ( keyProperty )+ ) );
	public final HiveParser.tablePropertiesList_return tablePropertiesList() throws RecognitionException {
		HiveParser.tablePropertiesList_return retval = new HiveParser.tablePropertiesList_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token COMMA763=null;
		Token COMMA766=null;
		ParserRuleReturnScope keyValueProperty762 =null;
		ParserRuleReturnScope keyValueProperty764 =null;
		ParserRuleReturnScope keyProperty765 =null;
		ParserRuleReturnScope keyProperty767 =null;

		ASTNode COMMA763_tree=null;
		ASTNode COMMA766_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleSubtreeStream stream_keyValueProperty=new RewriteRuleSubtreeStream(adaptor,"rule keyValueProperty");
		RewriteRuleSubtreeStream stream_keyProperty=new RewriteRuleSubtreeStream(adaptor,"rule keyProperty");

		 pushMsg("table properties list", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2063:5: ( keyValueProperty ( COMMA keyValueProperty )* -> ^( TOK_TABLEPROPLIST ( keyValueProperty )+ ) | keyProperty ( COMMA keyProperty )* -> ^( TOK_TABLEPROPLIST ( keyProperty )+ ) )
			int alt232=2;
			int LA232_0 = input.LA(1);
			if ( (LA232_0==StringLiteral) ) {
				int LA232_1 = input.LA(2);
				if ( (LA232_1==EQUAL) ) {
					alt232=1;
				}
				else if ( (LA232_1==COMMA||LA232_1==RPAREN) ) {
					alt232=2;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 232, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 232, 0, input);
				throw nvae;
			}

			switch (alt232) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2064:7: keyValueProperty ( COMMA keyValueProperty )*
					{
					pushFollow(FOLLOW_keyValueProperty_in_tablePropertiesList12160);
					keyValueProperty762=keyValueProperty();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_keyValueProperty.add(keyValueProperty762.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2064:24: ( COMMA keyValueProperty )*
					loop230:
					while (true) {
						int alt230=2;
						int LA230_0 = input.LA(1);
						if ( (LA230_0==COMMA) ) {
							alt230=1;
						}

						switch (alt230) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2064:25: COMMA keyValueProperty
							{
							COMMA763=(Token)match(input,COMMA,FOLLOW_COMMA_in_tablePropertiesList12163); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_COMMA.add(COMMA763);

							pushFollow(FOLLOW_keyValueProperty_in_tablePropertiesList12165);
							keyValueProperty764=keyValueProperty();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_keyValueProperty.add(keyValueProperty764.getTree());
							}
							break;

						default :
							break loop230;
						}
					}

					// AST REWRITE
					// elements: keyValueProperty
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2064:50: -> ^( TOK_TABLEPROPLIST ( keyValueProperty )+ )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2064:53: ^( TOK_TABLEPROPLIST ( keyValueProperty )+ )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLEPROPLIST, "TOK_TABLEPROPLIST"), root_1);
						if ( !(stream_keyValueProperty.hasNext()) ) {
							throw new RewriteEarlyExitException();
						}
						while ( stream_keyValueProperty.hasNext() ) {
							adaptor.addChild(root_1, stream_keyValueProperty.nextTree());
						}
						stream_keyValueProperty.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2066:7: keyProperty ( COMMA keyProperty )*
					{
					pushFollow(FOLLOW_keyProperty_in_tablePropertiesList12190);
					keyProperty765=keyProperty();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_keyProperty.add(keyProperty765.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2066:19: ( COMMA keyProperty )*
					loop231:
					while (true) {
						int alt231=2;
						int LA231_0 = input.LA(1);
						if ( (LA231_0==COMMA) ) {
							alt231=1;
						}

						switch (alt231) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2066:20: COMMA keyProperty
							{
							COMMA766=(Token)match(input,COMMA,FOLLOW_COMMA_in_tablePropertiesList12193); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_COMMA.add(COMMA766);

							pushFollow(FOLLOW_keyProperty_in_tablePropertiesList12195);
							keyProperty767=keyProperty();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_keyProperty.add(keyProperty767.getTree());
							}
							break;

						default :
							break loop231;
						}
					}

					// AST REWRITE
					// elements: keyProperty
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2066:40: -> ^( TOK_TABLEPROPLIST ( keyProperty )+ )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2066:43: ^( TOK_TABLEPROPLIST ( keyProperty )+ )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLEPROPLIST, "TOK_TABLEPROPLIST"), root_1);
						if ( !(stream_keyProperty.hasNext()) ) {
							throw new RewriteEarlyExitException();
						}
						while ( stream_keyProperty.hasNext() ) {
							adaptor.addChild(root_1, stream_keyProperty.nextTree());
						}
						stream_keyProperty.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "tablePropertiesList"


	public static class keyValueProperty_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "keyValueProperty"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2069:1: keyValueProperty : key= StringLiteral EQUAL value= StringLiteral -> ^( TOK_TABLEPROPERTY $key $value) ;
	public final HiveParser.keyValueProperty_return keyValueProperty() throws RecognitionException {
		HiveParser.keyValueProperty_return retval = new HiveParser.keyValueProperty_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token key=null;
		Token value=null;
		Token EQUAL768=null;

		ASTNode key_tree=null;
		ASTNode value_tree=null;
		ASTNode EQUAL768_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_EQUAL=new RewriteRuleTokenStream(adaptor,"token EQUAL");

		 pushMsg("specifying key/value property", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2072:5: (key= StringLiteral EQUAL value= StringLiteral -> ^( TOK_TABLEPROPERTY $key $value) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2073:7: key= StringLiteral EQUAL value= StringLiteral
			{
			key=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_keyValueProperty12241); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(key);

			EQUAL768=(Token)match(input,EQUAL,FOLLOW_EQUAL_in_keyValueProperty12243); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_EQUAL.add(EQUAL768);

			value=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_keyValueProperty12247); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(value);

			// AST REWRITE
			// elements: key, value
			// token labels: value, key
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_value=new RewriteRuleTokenStream(adaptor,"token value",value);
			RewriteRuleTokenStream stream_key=new RewriteRuleTokenStream(adaptor,"token key",key);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2073:51: -> ^( TOK_TABLEPROPERTY $key $value)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2073:54: ^( TOK_TABLEPROPERTY $key $value)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY"), root_1);
				adaptor.addChild(root_1, stream_key.nextNode());
				adaptor.addChild(root_1, stream_value.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "keyValueProperty"


	public static class keyProperty_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "keyProperty"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2076:1: keyProperty : key= StringLiteral -> ^( TOK_TABLEPROPERTY $key TOK_NULL ) ;
	public final HiveParser.keyProperty_return keyProperty() throws RecognitionException {
		HiveParser.keyProperty_return retval = new HiveParser.keyProperty_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token key=null;

		ASTNode key_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");

		 pushMsg("specifying key property", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2079:5: (key= StringLiteral -> ^( TOK_TABLEPROPERTY $key TOK_NULL ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2080:7: key= StringLiteral
			{
			key=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_keyProperty12294); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(key);

			// AST REWRITE
			// elements: key
			// token labels: key
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_key=new RewriteRuleTokenStream(adaptor,"token key",key);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2080:25: -> ^( TOK_TABLEPROPERTY $key TOK_NULL )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2080:28: ^( TOK_TABLEPROPERTY $key TOK_NULL )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY"), root_1);
				adaptor.addChild(root_1, stream_key.nextNode());
				adaptor.addChild(root_1, (ASTNode)adaptor.create(TOK_NULL, "TOK_NULL"));
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "keyProperty"


	public static class tableRowFormatFieldIdentifier_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tableRowFormatFieldIdentifier"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2083:1: tableRowFormatFieldIdentifier : KW_FIELDS KW_TERMINATED KW_BY fldIdnt= StringLiteral ( KW_ESCAPED KW_BY fldEscape= StringLiteral )? -> ^( TOK_TABLEROWFORMATFIELD $fldIdnt ( $fldEscape)? ) ;
	public final HiveParser.tableRowFormatFieldIdentifier_return tableRowFormatFieldIdentifier() throws RecognitionException {
		HiveParser.tableRowFormatFieldIdentifier_return retval = new HiveParser.tableRowFormatFieldIdentifier_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token fldIdnt=null;
		Token fldEscape=null;
		Token KW_FIELDS769=null;
		Token KW_TERMINATED770=null;
		Token KW_BY771=null;
		Token KW_ESCAPED772=null;
		Token KW_BY773=null;

		ASTNode fldIdnt_tree=null;
		ASTNode fldEscape_tree=null;
		ASTNode KW_FIELDS769_tree=null;
		ASTNode KW_TERMINATED770_tree=null;
		ASTNode KW_BY771_tree=null;
		ASTNode KW_ESCAPED772_tree=null;
		ASTNode KW_BY773_tree=null;
		RewriteRuleTokenStream stream_KW_TERMINATED=new RewriteRuleTokenStream(adaptor,"token KW_TERMINATED");
		RewriteRuleTokenStream stream_KW_BY=new RewriteRuleTokenStream(adaptor,"token KW_BY");
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_ESCAPED=new RewriteRuleTokenStream(adaptor,"token KW_ESCAPED");
		RewriteRuleTokenStream stream_KW_FIELDS=new RewriteRuleTokenStream(adaptor,"token KW_FIELDS");

		 pushMsg("table row format's field separator", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2086:5: ( KW_FIELDS KW_TERMINATED KW_BY fldIdnt= StringLiteral ( KW_ESCAPED KW_BY fldEscape= StringLiteral )? -> ^( TOK_TABLEROWFORMATFIELD $fldIdnt ( $fldEscape)? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2087:7: KW_FIELDS KW_TERMINATED KW_BY fldIdnt= StringLiteral ( KW_ESCAPED KW_BY fldEscape= StringLiteral )?
			{
			KW_FIELDS769=(Token)match(input,KW_FIELDS,FOLLOW_KW_FIELDS_in_tableRowFormatFieldIdentifier12338); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FIELDS.add(KW_FIELDS769);

			KW_TERMINATED770=(Token)match(input,KW_TERMINATED,FOLLOW_KW_TERMINATED_in_tableRowFormatFieldIdentifier12340); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TERMINATED.add(KW_TERMINATED770);

			KW_BY771=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableRowFormatFieldIdentifier12342); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_BY.add(KW_BY771);

			fldIdnt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableRowFormatFieldIdentifier12346); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(fldIdnt);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2087:59: ( KW_ESCAPED KW_BY fldEscape= StringLiteral )?
			int alt233=2;
			int LA233_0 = input.LA(1);
			if ( (LA233_0==KW_ESCAPED) ) {
				alt233=1;
			}
			switch (alt233) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2087:60: KW_ESCAPED KW_BY fldEscape= StringLiteral
					{
					KW_ESCAPED772=(Token)match(input,KW_ESCAPED,FOLLOW_KW_ESCAPED_in_tableRowFormatFieldIdentifier12349); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ESCAPED.add(KW_ESCAPED772);

					KW_BY773=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableRowFormatFieldIdentifier12351); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_BY.add(KW_BY773);

					fldEscape=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableRowFormatFieldIdentifier12355); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(fldEscape);

					}
					break;

			}

			// AST REWRITE
			// elements: fldIdnt, fldEscape
			// token labels: fldIdnt, fldEscape
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_fldIdnt=new RewriteRuleTokenStream(adaptor,"token fldIdnt",fldIdnt);
			RewriteRuleTokenStream stream_fldEscape=new RewriteRuleTokenStream(adaptor,"token fldEscape",fldEscape);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2088:5: -> ^( TOK_TABLEROWFORMATFIELD $fldIdnt ( $fldEscape)? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2088:8: ^( TOK_TABLEROWFORMATFIELD $fldIdnt ( $fldEscape)? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLEROWFORMATFIELD, "TOK_TABLEROWFORMATFIELD"), root_1);
				adaptor.addChild(root_1, stream_fldIdnt.nextNode());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2088:44: ( $fldEscape)?
				if ( stream_fldEscape.hasNext() ) {
					adaptor.addChild(root_1, stream_fldEscape.nextNode());
				}
				stream_fldEscape.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "tableRowFormatFieldIdentifier"


	public static class tableRowFormatCollItemsIdentifier_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tableRowFormatCollItemsIdentifier"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2091:1: tableRowFormatCollItemsIdentifier : KW_COLLECTION KW_ITEMS KW_TERMINATED KW_BY collIdnt= StringLiteral -> ^( TOK_TABLEROWFORMATCOLLITEMS $collIdnt) ;
	public final HiveParser.tableRowFormatCollItemsIdentifier_return tableRowFormatCollItemsIdentifier() throws RecognitionException {
		HiveParser.tableRowFormatCollItemsIdentifier_return retval = new HiveParser.tableRowFormatCollItemsIdentifier_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token collIdnt=null;
		Token KW_COLLECTION774=null;
		Token KW_ITEMS775=null;
		Token KW_TERMINATED776=null;
		Token KW_BY777=null;

		ASTNode collIdnt_tree=null;
		ASTNode KW_COLLECTION774_tree=null;
		ASTNode KW_ITEMS775_tree=null;
		ASTNode KW_TERMINATED776_tree=null;
		ASTNode KW_BY777_tree=null;
		RewriteRuleTokenStream stream_KW_COLLECTION=new RewriteRuleTokenStream(adaptor,"token KW_COLLECTION");
		RewriteRuleTokenStream stream_KW_TERMINATED=new RewriteRuleTokenStream(adaptor,"token KW_TERMINATED");
		RewriteRuleTokenStream stream_KW_ITEMS=new RewriteRuleTokenStream(adaptor,"token KW_ITEMS");
		RewriteRuleTokenStream stream_KW_BY=new RewriteRuleTokenStream(adaptor,"token KW_BY");
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");

		 pushMsg("table row format's column separator", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2094:5: ( KW_COLLECTION KW_ITEMS KW_TERMINATED KW_BY collIdnt= StringLiteral -> ^( TOK_TABLEROWFORMATCOLLITEMS $collIdnt) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2095:7: KW_COLLECTION KW_ITEMS KW_TERMINATED KW_BY collIdnt= StringLiteral
			{
			KW_COLLECTION774=(Token)match(input,KW_COLLECTION,FOLLOW_KW_COLLECTION_in_tableRowFormatCollItemsIdentifier12407); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_COLLECTION.add(KW_COLLECTION774);

			KW_ITEMS775=(Token)match(input,KW_ITEMS,FOLLOW_KW_ITEMS_in_tableRowFormatCollItemsIdentifier12409); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ITEMS.add(KW_ITEMS775);

			KW_TERMINATED776=(Token)match(input,KW_TERMINATED,FOLLOW_KW_TERMINATED_in_tableRowFormatCollItemsIdentifier12411); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TERMINATED.add(KW_TERMINATED776);

			KW_BY777=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableRowFormatCollItemsIdentifier12413); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_BY.add(KW_BY777);

			collIdnt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableRowFormatCollItemsIdentifier12417); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(collIdnt);

			// AST REWRITE
			// elements: collIdnt
			// token labels: collIdnt
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_collIdnt=new RewriteRuleTokenStream(adaptor,"token collIdnt",collIdnt);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2096:5: -> ^( TOK_TABLEROWFORMATCOLLITEMS $collIdnt)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2096:8: ^( TOK_TABLEROWFORMATCOLLITEMS $collIdnt)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLEROWFORMATCOLLITEMS, "TOK_TABLEROWFORMATCOLLITEMS"), root_1);
				adaptor.addChild(root_1, stream_collIdnt.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "tableRowFormatCollItemsIdentifier"


	public static class tableRowFormatMapKeysIdentifier_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tableRowFormatMapKeysIdentifier"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2099:1: tableRowFormatMapKeysIdentifier : KW_MAP KW_KEYS KW_TERMINATED KW_BY mapKeysIdnt= StringLiteral -> ^( TOK_TABLEROWFORMATMAPKEYS $mapKeysIdnt) ;
	public final HiveParser.tableRowFormatMapKeysIdentifier_return tableRowFormatMapKeysIdentifier() throws RecognitionException {
		HiveParser.tableRowFormatMapKeysIdentifier_return retval = new HiveParser.tableRowFormatMapKeysIdentifier_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token mapKeysIdnt=null;
		Token KW_MAP778=null;
		Token KW_KEYS779=null;
		Token KW_TERMINATED780=null;
		Token KW_BY781=null;

		ASTNode mapKeysIdnt_tree=null;
		ASTNode KW_MAP778_tree=null;
		ASTNode KW_KEYS779_tree=null;
		ASTNode KW_TERMINATED780_tree=null;
		ASTNode KW_BY781_tree=null;
		RewriteRuleTokenStream stream_KW_KEYS=new RewriteRuleTokenStream(adaptor,"token KW_KEYS");
		RewriteRuleTokenStream stream_KW_TERMINATED=new RewriteRuleTokenStream(adaptor,"token KW_TERMINATED");
		RewriteRuleTokenStream stream_KW_BY=new RewriteRuleTokenStream(adaptor,"token KW_BY");
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_MAP=new RewriteRuleTokenStream(adaptor,"token KW_MAP");

		 pushMsg("table row format's map key separator", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2102:5: ( KW_MAP KW_KEYS KW_TERMINATED KW_BY mapKeysIdnt= StringLiteral -> ^( TOK_TABLEROWFORMATMAPKEYS $mapKeysIdnt) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2103:7: KW_MAP KW_KEYS KW_TERMINATED KW_BY mapKeysIdnt= StringLiteral
			{
			KW_MAP778=(Token)match(input,KW_MAP,FOLLOW_KW_MAP_in_tableRowFormatMapKeysIdentifier12463); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_MAP.add(KW_MAP778);

			KW_KEYS779=(Token)match(input,KW_KEYS,FOLLOW_KW_KEYS_in_tableRowFormatMapKeysIdentifier12465); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_KEYS.add(KW_KEYS779);

			KW_TERMINATED780=(Token)match(input,KW_TERMINATED,FOLLOW_KW_TERMINATED_in_tableRowFormatMapKeysIdentifier12467); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TERMINATED.add(KW_TERMINATED780);

			KW_BY781=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableRowFormatMapKeysIdentifier12469); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_BY.add(KW_BY781);

			mapKeysIdnt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableRowFormatMapKeysIdentifier12473); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(mapKeysIdnt);

			// AST REWRITE
			// elements: mapKeysIdnt
			// token labels: mapKeysIdnt
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_mapKeysIdnt=new RewriteRuleTokenStream(adaptor,"token mapKeysIdnt",mapKeysIdnt);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2104:5: -> ^( TOK_TABLEROWFORMATMAPKEYS $mapKeysIdnt)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2104:8: ^( TOK_TABLEROWFORMATMAPKEYS $mapKeysIdnt)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLEROWFORMATMAPKEYS, "TOK_TABLEROWFORMATMAPKEYS"), root_1);
				adaptor.addChild(root_1, stream_mapKeysIdnt.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "tableRowFormatMapKeysIdentifier"


	public static class tableRowFormatLinesIdentifier_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tableRowFormatLinesIdentifier"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2107:1: tableRowFormatLinesIdentifier : KW_LINES KW_TERMINATED KW_BY linesIdnt= StringLiteral -> ^( TOK_TABLEROWFORMATLINES $linesIdnt) ;
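	// Illustrative note (hand-added, not ANTLR output): this rule matches the
	// ROW FORMAT DELIMITED fragment
	//     LINES TERMINATED BY '\n'
	// (the '\n' terminator is a hypothetical example) and rewrites it into
	// ^(TOK_TABLEROWFORMATLINES '\n').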
	public final HiveParser.tableRowFormatLinesIdentifier_return tableRowFormatLinesIdentifier() throws RecognitionException {
		HiveParser.tableRowFormatLinesIdentifier_return retval = new HiveParser.tableRowFormatLinesIdentifier_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token linesIdnt=null;
		Token KW_LINES782=null;
		Token KW_TERMINATED783=null;
		Token KW_BY784=null;

		ASTNode linesIdnt_tree=null;
		ASTNode KW_LINES782_tree=null;
		ASTNode KW_TERMINATED783_tree=null;
		ASTNode KW_BY784_tree=null;
		RewriteRuleTokenStream stream_KW_TERMINATED=new RewriteRuleTokenStream(adaptor,"token KW_TERMINATED");
		RewriteRuleTokenStream stream_KW_BY=new RewriteRuleTokenStream(adaptor,"token KW_BY");
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_LINES=new RewriteRuleTokenStream(adaptor,"token KW_LINES");

		 pushMsg("table row format's line separator", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2110:5: ( KW_LINES KW_TERMINATED KW_BY linesIdnt= StringLiteral -> ^( TOK_TABLEROWFORMATLINES $linesIdnt) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2111:7: KW_LINES KW_TERMINATED KW_BY linesIdnt= StringLiteral
			{
			KW_LINES782=(Token)match(input,KW_LINES,FOLLOW_KW_LINES_in_tableRowFormatLinesIdentifier12519); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_LINES.add(KW_LINES782);

			KW_TERMINATED783=(Token)match(input,KW_TERMINATED,FOLLOW_KW_TERMINATED_in_tableRowFormatLinesIdentifier12521); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TERMINATED.add(KW_TERMINATED783);

			KW_BY784=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableRowFormatLinesIdentifier12523); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_BY.add(KW_BY784);

			linesIdnt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableRowFormatLinesIdentifier12527); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(linesIdnt);

			// AST REWRITE
			// elements: linesIdnt
			// token labels: linesIdnt
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_linesIdnt=new RewriteRuleTokenStream(adaptor,"token linesIdnt",linesIdnt);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2112:5: -> ^( TOK_TABLEROWFORMATLINES $linesIdnt)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2112:8: ^( TOK_TABLEROWFORMATLINES $linesIdnt)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLEROWFORMATLINES, "TOK_TABLEROWFORMATLINES"), root_1);
				adaptor.addChild(root_1, stream_linesIdnt.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "tableRowFormatLinesIdentifier"


	public static class tableRowNullFormat_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tableRowNullFormat"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2115:1: tableRowNullFormat : KW_NULL KW_DEFINED KW_AS nullIdnt= StringLiteral -> ^( TOK_TABLEROWFORMATNULL $nullIdnt) ;
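	// Illustrative note (hand-added, not ANTLR output): this rule matches the
	// ROW FORMAT fragment
	//     NULL DEFINED AS '\\N'
	// (the '\\N' marker is a hypothetical example) and rewrites it into
	// ^(TOK_TABLEROWFORMATNULL '\\N').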
	public final HiveParser.tableRowNullFormat_return tableRowNullFormat() throws RecognitionException {
		HiveParser.tableRowNullFormat_return retval = new HiveParser.tableRowNullFormat_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token nullIdnt=null;
		Token KW_NULL785=null;
		Token KW_DEFINED786=null;
		Token KW_AS787=null;

		ASTNode nullIdnt_tree=null;
		ASTNode KW_NULL785_tree=null;
		ASTNode KW_DEFINED786_tree=null;
		ASTNode KW_AS787_tree=null;
		RewriteRuleTokenStream stream_KW_NULL=new RewriteRuleTokenStream(adaptor,"token KW_NULL");
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
		RewriteRuleTokenStream stream_KW_DEFINED=new RewriteRuleTokenStream(adaptor,"token KW_DEFINED");

		 pushMsg("table row format's null specifier", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2118:5: ( KW_NULL KW_DEFINED KW_AS nullIdnt= StringLiteral -> ^( TOK_TABLEROWFORMATNULL $nullIdnt) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2119:7: KW_NULL KW_DEFINED KW_AS nullIdnt= StringLiteral
			{
			KW_NULL785=(Token)match(input,KW_NULL,FOLLOW_KW_NULL_in_tableRowNullFormat12573); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_NULL.add(KW_NULL785);

			KW_DEFINED786=(Token)match(input,KW_DEFINED,FOLLOW_KW_DEFINED_in_tableRowNullFormat12575); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DEFINED.add(KW_DEFINED786);

			KW_AS787=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_tableRowNullFormat12577); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_AS.add(KW_AS787);

			nullIdnt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableRowNullFormat12581); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(nullIdnt);

			// AST REWRITE
			// elements: nullIdnt
			// token labels: nullIdnt
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_nullIdnt=new RewriteRuleTokenStream(adaptor,"token nullIdnt",nullIdnt);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2120:5: -> ^( TOK_TABLEROWFORMATNULL $nullIdnt)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2120:8: ^( TOK_TABLEROWFORMATNULL $nullIdnt)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLEROWFORMATNULL, "TOK_TABLEROWFORMATNULL"), root_1);
				adaptor.addChild(root_1, stream_nullIdnt.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "tableRowNullFormat"


	public static class tableFileFormat_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tableFileFormat"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2122:1: tableFileFormat : ( ( KW_STORED KW_AS KW_INPUTFORMAT )=> KW_STORED KW_AS KW_INPUTFORMAT inFmt= StringLiteral KW_OUTPUTFORMAT outFmt= StringLiteral ( KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral )? -> ^( TOK_TABLEFILEFORMAT $inFmt $outFmt ( $inDriver)? ( $outDriver)? ) | KW_STORED KW_BY storageHandler= StringLiteral ( KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties )? -> ^( TOK_STORAGEHANDLER $storageHandler ( $serdeprops)? ) | KW_STORED KW_AS genericSpec= identifier -> ^( TOK_FILEFORMAT_GENERIC $genericSpec) );
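	// Illustrative note (hand-added, not ANTLR output): the three alternatives of
	// this rule correspond to STORED clauses such as
	//     STORED AS INPUTFORMAT 'org.example.MyInputFormat' OUTPUTFORMAT 'org.example.MyOutputFormat'
	//     STORED BY 'org.example.MyStorageHandler' WITH SERDEPROPERTIES ('k'='v')
	//     STORED AS ORC
	// (the class names and the ORC format name are hypothetical examples). The
	// syntactic predicate (KW_STORED KW_AS KW_INPUTFORMAT)=> is what separates the
	// first alternative from the generic STORED AS <identifier> form in case 3.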
	public final HiveParser.tableFileFormat_return tableFileFormat() throws RecognitionException {
		HiveParser.tableFileFormat_return retval = new HiveParser.tableFileFormat_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token inFmt=null;
		Token outFmt=null;
		Token inDriver=null;
		Token outDriver=null;
		Token storageHandler=null;
		Token KW_STORED788=null;
		Token KW_AS789=null;
		Token KW_INPUTFORMAT790=null;
		Token KW_OUTPUTFORMAT791=null;
		Token KW_INPUTDRIVER792=null;
		Token KW_OUTPUTDRIVER793=null;
		Token KW_STORED794=null;
		Token KW_BY795=null;
		Token KW_WITH796=null;
		Token KW_SERDEPROPERTIES797=null;
		Token KW_STORED798=null;
		Token KW_AS799=null;
		ParserRuleReturnScope serdeprops =null;
		ParserRuleReturnScope genericSpec =null;

		ASTNode inFmt_tree=null;
		ASTNode outFmt_tree=null;
		ASTNode inDriver_tree=null;
		ASTNode outDriver_tree=null;
		ASTNode storageHandler_tree=null;
		ASTNode KW_STORED788_tree=null;
		ASTNode KW_AS789_tree=null;
		ASTNode KW_INPUTFORMAT790_tree=null;
		ASTNode KW_OUTPUTFORMAT791_tree=null;
		ASTNode KW_INPUTDRIVER792_tree=null;
		ASTNode KW_OUTPUTDRIVER793_tree=null;
		ASTNode KW_STORED794_tree=null;
		ASTNode KW_BY795_tree=null;
		ASTNode KW_WITH796_tree=null;
		ASTNode KW_SERDEPROPERTIES797_tree=null;
		ASTNode KW_STORED798_tree=null;
		ASTNode KW_AS799_tree=null;
		RewriteRuleTokenStream stream_KW_BY=new RewriteRuleTokenStream(adaptor,"token KW_BY");
		RewriteRuleTokenStream stream_KW_INPUTFORMAT=new RewriteRuleTokenStream(adaptor,"token KW_INPUTFORMAT");
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
		RewriteRuleTokenStream stream_KW_SERDEPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_SERDEPROPERTIES");
		RewriteRuleTokenStream stream_KW_INPUTDRIVER=new RewriteRuleTokenStream(adaptor,"token KW_INPUTDRIVER");
		RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
		RewriteRuleTokenStream stream_KW_OUTPUTFORMAT=new RewriteRuleTokenStream(adaptor,"token KW_OUTPUTFORMAT");
		RewriteRuleTokenStream stream_KW_STORED=new RewriteRuleTokenStream(adaptor,"token KW_STORED");
		RewriteRuleTokenStream stream_KW_OUTPUTDRIVER=new RewriteRuleTokenStream(adaptor,"token KW_OUTPUTDRIVER");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_tableProperties=new RewriteRuleSubtreeStream(adaptor,"rule tableProperties");

		 pushMsg("table file format specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2125:5: ( ( KW_STORED KW_AS KW_INPUTFORMAT )=> KW_STORED KW_AS KW_INPUTFORMAT inFmt= StringLiteral KW_OUTPUTFORMAT outFmt= StringLiteral ( KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral )? -> ^( TOK_TABLEFILEFORMAT $inFmt $outFmt ( $inDriver)? ( $outDriver)? ) | KW_STORED KW_BY storageHandler= StringLiteral ( KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties )? -> ^( TOK_STORAGEHANDLER $storageHandler ( $serdeprops)? ) | KW_STORED KW_AS genericSpec= identifier -> ^( TOK_FILEFORMAT_GENERIC $genericSpec) )
			int alt236=3;
			int LA236_0 = input.LA(1);
			if ( (LA236_0==KW_STORED) ) {
				int LA236_1 = input.LA(2);
				if ( (LA236_1==KW_AS) ) {
					int LA236_2 = input.LA(3);
					if ( (LA236_2==KW_INPUTFORMAT) ) {
						int LA236_4 = input.LA(4);
						if ( (synpred18_HiveParser()) ) {
							alt236=1;
						}
						else if ( (true) ) {
							alt236=3;
						}

					}
					else if ( (LA236_2==Identifier||(LA236_2 >= KW_ABORT && LA236_2 <= KW_AFTER)||LA236_2==KW_ALLOC_FRACTION||LA236_2==KW_ANALYZE||LA236_2==KW_ARCHIVE||LA236_2==KW_ASC||(LA236_2 >= KW_AUTOCOMMIT && LA236_2 <= KW_BEFORE)||(LA236_2 >= KW_BUCKET && LA236_2 <= KW_BUCKETS)||(LA236_2 >= KW_CACHE && LA236_2 <= KW_CASCADE)||LA236_2==KW_CHANGE||(LA236_2 >= KW_CHECK && LA236_2 <= KW_COLLECTION)||(LA236_2 >= KW_COLUMNS && LA236_2 <= KW_COMMENT)||(LA236_2 >= KW_COMPACT && LA236_2 <= KW_CONCATENATE)||LA236_2==KW_CONTINUE||LA236_2==KW_DATA||LA236_2==KW_DATABASES||(LA236_2 >= KW_DATETIME && LA236_2 <= KW_DBPROPERTIES)||(LA236_2 >= KW_DEFAULT && LA236_2 <= KW_DEFINED)||(LA236_2 >= KW_DELIMITED && LA236_2 <= KW_DESC)||(LA236_2 >= KW_DETAIL && LA236_2 <= KW_DISABLE)||(LA236_2 >= KW_DISTRIBUTE && LA236_2 <= KW_DO)||LA236_2==KW_DOW||(LA236_2 >= KW_DUMP && LA236_2 <= KW_ELEM_TYPE)||LA236_2==KW_ENABLE||(LA236_2 >= KW_ENFORCED && LA236_2 <= KW_ESCAPED)||LA236_2==KW_EXCLUSIVE||(LA236_2 >= KW_EXPLAIN && LA236_2 <= KW_EXPRESSION)||(LA236_2 >= KW_FIELDS && LA236_2 <= KW_FIRST)||(LA236_2 >= KW_FORMAT && LA236_2 <= KW_FORMATTED)||LA236_2==KW_FUNCTIONS||(LA236_2 >= KW_HOUR && LA236_2 <= KW_IDXPROPERTIES)||(LA236_2 >= KW_INDEX && LA236_2 <= KW_INDEXES)||(LA236_2 >= KW_INPATH && LA236_2 <= KW_INPUTDRIVER)||(LA236_2 >= KW_ISOLATION && LA236_2 <= KW_JAR)||(LA236_2 >= KW_KEY && LA236_2 <= KW_LAST)||LA236_2==KW_LEVEL||(LA236_2 >= KW_LIMIT && LA236_2 <= KW_LOAD)||(LA236_2 >= KW_LOCATION && LA236_2 <= KW_LONG)||LA236_2==KW_MANAGEMENT||(LA236_2 >= KW_MAPJOIN && LA236_2 <= KW_MATERIALIZED)||LA236_2==KW_METADATA||(LA236_2 >= KW_MINUTE && LA236_2 <= KW_MONTH)||(LA236_2 >= KW_MOVE && LA236_2 <= KW_MSCK)||(LA236_2 >= KW_NORELY && LA236_2 <= KW_NOSCAN)||LA236_2==KW_NOVALIDATE||LA236_2==KW_NULLS||LA236_2==KW_OFFSET||(LA236_2 >= KW_OPERATOR && LA236_2 <= KW_OPTION)||(LA236_2 >= KW_OUTPUTDRIVER && LA236_2 <= KW_OUTPUTFORMAT)||(LA236_2 >= KW_OVERWRITE && LA236_2 <= KW_OWNER)||(LA236_2 >= KW_PARTITIONED && LA236_2 <= KW_PATH)||(LA236_2 >= KW_PLAN && LA236_2 <= KW_POOL)||LA236_2==KW_PRINCIPALS||(LA236_2 >= KW_PURGE && LA236_2 <= KW_QUERY_PARALLELISM)||LA236_2==KW_READ||(LA236_2 >= KW_REBUILD && LA236_2 <= KW_RECORDWRITER)||(LA236_2 >= KW_RELOAD && LA236_2 <= KW_RESTRICT)||LA236_2==KW_REWRITE||(LA236_2 >= KW_ROLE && LA236_2 <= KW_ROLES)||(LA236_2 >= KW_SCHEDULING_POLICY && LA236_2 <= KW_SECOND)||(LA236_2 >= KW_SEMI && LA236_2 <= KW_SERVER)||(LA236_2 >= KW_SETS && LA236_2 <= KW_SKEWED)||(LA236_2 >= KW_SNAPSHOT && LA236_2 <= KW_SSL)||(LA236_2 >= KW_STATISTICS && LA236_2 <= KW_SUMMARY)||LA236_2==KW_TABLES||(LA236_2 >= KW_TBLPROPERTIES && LA236_2 <= KW_TERMINATED)||LA236_2==KW_TINYINT||(LA236_2 >= KW_TOUCH && LA236_2 <= KW_TRANSACTIONS)||LA236_2==KW_UNARCHIVE||LA236_2==KW_UNDO||LA236_2==KW_UNIONTYPE||(LA236_2 >= KW_UNLOCK && LA236_2 <= KW_UNSIGNED)||(LA236_2 >= KW_URI && LA236_2 <= KW_USE)||(LA236_2 >= KW_UTC && LA236_2 <= KW_VALIDATE)||LA236_2==KW_VALUE_TYPE||(LA236_2 >= KW_VECTORIZATION && LA236_2 <= KW_WEEK)||LA236_2==KW_WHILE||(LA236_2 >= KW_WORK && LA236_2 <= KW_ZONE)||LA236_2==KW_BATCH||LA236_2==KW_DAYOFWEEK||LA236_2==KW_HOLD_DDLTIME||LA236_2==KW_IGNORE||LA236_2==KW_NO_DROP||LA236_2==KW_OFFLINE||LA236_2==KW_PROTECTION||LA236_2==KW_READONLY||LA236_2==KW_TIMESTAMPTZ) ) {
						alt236=3;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						int nvaeMark = input.mark();
						try {
							for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
								input.consume();
							}
							NoViableAltException nvae =
								new NoViableAltException("", 236, 2, input);
							throw nvae;
						} finally {
							input.rewind(nvaeMark);
						}
					}

				}
				else if ( (LA236_1==KW_BY) ) {
					alt236=2;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 236, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 236, 0, input);
				throw nvae;
			}

			switch (alt236) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2126:7: ( KW_STORED KW_AS KW_INPUTFORMAT )=> KW_STORED KW_AS KW_INPUTFORMAT inFmt= StringLiteral KW_OUTPUTFORMAT outFmt= StringLiteral ( KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral )?
					{
					KW_STORED788=(Token)match(input,KW_STORED,FOLLOW_KW_STORED_in_tableFileFormat12636); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_STORED.add(KW_STORED788);

					KW_AS789=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_tableFileFormat12638); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_AS.add(KW_AS789);

					KW_INPUTFORMAT790=(Token)match(input,KW_INPUTFORMAT,FOLLOW_KW_INPUTFORMAT_in_tableFileFormat12640); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_INPUTFORMAT.add(KW_INPUTFORMAT790);

					inFmt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableFileFormat12644); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(inFmt);

					KW_OUTPUTFORMAT791=(Token)match(input,KW_OUTPUTFORMAT,FOLLOW_KW_OUTPUTFORMAT_in_tableFileFormat12646); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_OUTPUTFORMAT.add(KW_OUTPUTFORMAT791);

					outFmt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableFileFormat12650); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(outFmt);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2126:131: ( KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral )?
					int alt234=2;
					int LA234_0 = input.LA(1);
					if ( (LA234_0==KW_INPUTDRIVER) ) {
						alt234=1;
					}
					switch (alt234) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2126:132: KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral
							{
							KW_INPUTDRIVER792=(Token)match(input,KW_INPUTDRIVER,FOLLOW_KW_INPUTDRIVER_in_tableFileFormat12653); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_INPUTDRIVER.add(KW_INPUTDRIVER792);

							inDriver=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableFileFormat12657); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_StringLiteral.add(inDriver);

							KW_OUTPUTDRIVER793=(Token)match(input,KW_OUTPUTDRIVER,FOLLOW_KW_OUTPUTDRIVER_in_tableFileFormat12659); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_OUTPUTDRIVER.add(KW_OUTPUTDRIVER793);

							outDriver=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableFileFormat12663); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_StringLiteral.add(outDriver);

							}
							break;

					}

					// AST REWRITE
					// elements: inDriver, inFmt, outFmt, outDriver
					// token labels: inFmt, inDriver, outDriver, outFmt
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleTokenStream stream_inFmt=new RewriteRuleTokenStream(adaptor,"token inFmt",inFmt);
					RewriteRuleTokenStream stream_inDriver=new RewriteRuleTokenStream(adaptor,"token inDriver",inDriver);
					RewriteRuleTokenStream stream_outDriver=new RewriteRuleTokenStream(adaptor,"token outDriver",outDriver);
					RewriteRuleTokenStream stream_outFmt=new RewriteRuleTokenStream(adaptor,"token outFmt",outFmt);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2127:7: -> ^( TOK_TABLEFILEFORMAT $inFmt $outFmt ( $inDriver)? ( $outDriver)? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2127:10: ^( TOK_TABLEFILEFORMAT $inFmt $outFmt ( $inDriver)? ( $outDriver)? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLEFILEFORMAT, "TOK_TABLEFILEFORMAT"), root_1);
						adaptor.addChild(root_1, stream_inFmt.nextNode());
						adaptor.addChild(root_1, stream_outFmt.nextNode());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2127:48: ( $inDriver)?
						if ( stream_inDriver.hasNext() ) {
							adaptor.addChild(root_1, stream_inDriver.nextNode());
						}
						stream_inDriver.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2127:59: ( $outDriver)?
						if ( stream_outDriver.hasNext() ) {
							adaptor.addChild(root_1, stream_outDriver.nextNode());
						}
						stream_outDriver.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2128:9: KW_STORED KW_BY storageHandler= StringLiteral ( KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties )?
					{
					KW_STORED794=(Token)match(input,KW_STORED,FOLLOW_KW_STORED_in_tableFileFormat12701); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_STORED.add(KW_STORED794);

					KW_BY795=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableFileFormat12703); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_BY.add(KW_BY795);

					storageHandler=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableFileFormat12707); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(storageHandler);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2129:10: ( KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties )?
					int alt235=2;
					int LA235_0 = input.LA(1);
					if ( (LA235_0==KW_WITH) ) {
						alt235=1;
					}
					switch (alt235) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2129:11: KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties
							{
							KW_WITH796=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_tableFileFormat12719); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_WITH.add(KW_WITH796);

							KW_SERDEPROPERTIES797=(Token)match(input,KW_SERDEPROPERTIES,FOLLOW_KW_SERDEPROPERTIES_in_tableFileFormat12721); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_SERDEPROPERTIES.add(KW_SERDEPROPERTIES797);

							pushFollow(FOLLOW_tableProperties_in_tableFileFormat12725);
							serdeprops=tableProperties();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tableProperties.add(serdeprops.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: storageHandler, serdeprops
					// token labels: storageHandler
					// rule labels: serdeprops, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleTokenStream stream_storageHandler=new RewriteRuleTokenStream(adaptor,"token storageHandler",storageHandler);
					RewriteRuleSubtreeStream stream_serdeprops=new RewriteRuleSubtreeStream(adaptor,"rule serdeprops",serdeprops!=null?serdeprops.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2130:7: -> ^( TOK_STORAGEHANDLER $storageHandler ( $serdeprops)? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2130:10: ^( TOK_STORAGEHANDLER $storageHandler ( $serdeprops)? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_STORAGEHANDLER, "TOK_STORAGEHANDLER"), root_1);
						adaptor.addChild(root_1, stream_storageHandler.nextNode());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2130:48: ( $serdeprops)?
						if ( stream_serdeprops.hasNext() ) {
							adaptor.addChild(root_1, stream_serdeprops.nextTree());
						}
						stream_serdeprops.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2131:9: KW_STORED KW_AS genericSpec= identifier
					{
					KW_STORED798=(Token)match(input,KW_STORED,FOLLOW_KW_STORED_in_tableFileFormat12756); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_STORED.add(KW_STORED798);

					KW_AS799=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_tableFileFormat12758); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_AS.add(KW_AS799);

					pushFollow(FOLLOW_identifier_in_tableFileFormat12762);
					genericSpec=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(genericSpec.getTree());
					// AST REWRITE
					// elements: genericSpec
					// token labels: 
					// rule labels: genericSpec, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_genericSpec=new RewriteRuleSubtreeStream(adaptor,"rule genericSpec",genericSpec!=null?genericSpec.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2132:7: -> ^( TOK_FILEFORMAT_GENERIC $genericSpec)
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2132:10: ^( TOK_FILEFORMAT_GENERIC $genericSpec)
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FILEFORMAT_GENERIC, "TOK_FILEFORMAT_GENERIC"), root_1);
						adaptor.addChild(root_1, stream_genericSpec.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "tableFileFormat"


	public static class tableLocation_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tableLocation"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2135:1: tableLocation : KW_LOCATION locn= StringLiteral -> ^( TOK_TABLELOCATION $locn) ;
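	// Illustrative note (hand-added, not ANTLR output): matches a clause such as
	//     LOCATION 'hdfs://namenode:8020/warehouse/t'
	// (the URI is a hypothetical example) and rewrites it into
	// ^(TOK_TABLELOCATION 'hdfs://namenode:8020/warehouse/t').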
	public final HiveParser.tableLocation_return tableLocation() throws RecognitionException {
		HiveParser.tableLocation_return retval = new HiveParser.tableLocation_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token locn=null;
		Token KW_LOCATION800=null;

		ASTNode locn_tree=null;
		ASTNode KW_LOCATION800_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_LOCATION=new RewriteRuleTokenStream(adaptor,"token KW_LOCATION");

		 pushMsg("table location specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2138:5: ( KW_LOCATION locn= StringLiteral -> ^( TOK_TABLELOCATION $locn) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2139:7: KW_LOCATION locn= StringLiteral
			{
			KW_LOCATION800=(Token)match(input,KW_LOCATION,FOLLOW_KW_LOCATION_in_tableLocation12810); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_LOCATION.add(KW_LOCATION800);

			locn=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableLocation12814); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_StringLiteral.add(locn);

			// AST REWRITE
			// elements: locn
			// token labels: locn
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_locn=new RewriteRuleTokenStream(adaptor,"token locn",locn);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2139:38: -> ^( TOK_TABLELOCATION $locn)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2139:41: ^( TOK_TABLELOCATION $locn)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABLELOCATION, "TOK_TABLELOCATION"), root_1);
				adaptor.addChild(root_1, stream_locn.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "tableLocation"


	public static class columnNameTypeList_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnNameTypeList"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2142:1: columnNameTypeList : columnNameType ( COMMA columnNameType )* -> ^( TOK_TABCOLLIST ( columnNameType )+ ) ;
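	// Illustrative note (hand-added, not ANTLR output): matches a comma-separated
	// column definition list such as
	//     id INT, name STRING
	// (hypothetical columns) and collects all definitions under a single
	// ^(TOK_TABCOLLIST ...) node.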
	public final HiveParser.columnNameTypeList_return columnNameTypeList() throws RecognitionException {
		HiveParser.columnNameTypeList_return retval = new HiveParser.columnNameTypeList_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token COMMA802=null;
		ParserRuleReturnScope columnNameType801 =null;
		ParserRuleReturnScope columnNameType803 =null;

		ASTNode COMMA802_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleSubtreeStream stream_columnNameType=new RewriteRuleSubtreeStream(adaptor,"rule columnNameType");

		 pushMsg("column name type list", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2145:5: ( columnNameType ( COMMA columnNameType )* -> ^( TOK_TABCOLLIST ( columnNameType )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2145:7: columnNameType ( COMMA columnNameType )*
			{
			pushFollow(FOLLOW_columnNameType_in_columnNameTypeList12850);
			columnNameType801=columnNameType();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnNameType.add(columnNameType801.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2145:22: ( COMMA columnNameType )*
			loop237:
			while (true) {
				int alt237=2;
				int LA237_0 = input.LA(1);
				if ( (LA237_0==COMMA) ) {
					int LA237_20 = input.LA(2);
					if ( (LA237_20==Identifier||(LA237_20 >= KW_ABORT && LA237_20 <= KW_AFTER)||LA237_20==KW_ALLOC_FRACTION||LA237_20==KW_ANALYZE||LA237_20==KW_ARCHIVE||LA237_20==KW_ASC||(LA237_20 >= KW_AUTOCOMMIT && LA237_20 <= KW_BEFORE)||(LA237_20 >= KW_BUCKET && LA237_20 <= KW_BUCKETS)||(LA237_20 >= KW_CACHE && LA237_20 <= KW_CASCADE)||LA237_20==KW_CHANGE||(LA237_20 >= KW_CHECK && LA237_20 <= KW_COLLECTION)||(LA237_20 >= KW_COLUMNS && LA237_20 <= KW_COMMENT)||(LA237_20 >= KW_COMPACT && LA237_20 <= KW_CONCATENATE)||LA237_20==KW_CONTINUE||LA237_20==KW_DATA||LA237_20==KW_DATABASES||(LA237_20 >= KW_DATETIME && LA237_20 <= KW_DBPROPERTIES)||(LA237_20 >= KW_DEFAULT && LA237_20 <= KW_DEFINED)||(LA237_20 >= KW_DELIMITED && LA237_20 <= KW_DESC)||(LA237_20 >= KW_DETAIL && LA237_20 <= KW_DISABLE)||(LA237_20 >= KW_DISTRIBUTE && LA237_20 <= KW_DO)||LA237_20==KW_DOW||(LA237_20 >= KW_DUMP && LA237_20 <= KW_ELEM_TYPE)||LA237_20==KW_ENABLE||(LA237_20 >= KW_ENFORCED && LA237_20 <= KW_ESCAPED)||LA237_20==KW_EXCLUSIVE||(LA237_20 >= KW_EXPLAIN && LA237_20 <= KW_EXPRESSION)||(LA237_20 >= KW_FIELDS && LA237_20 <= KW_FIRST)||(LA237_20 >= KW_FORMAT && LA237_20 <= KW_FORMATTED)||LA237_20==KW_FUNCTIONS||(LA237_20 >= KW_HOUR && LA237_20 <= KW_IDXPROPERTIES)||(LA237_20 >= KW_INDEX && LA237_20 <= KW_INDEXES)||(LA237_20 >= KW_INPATH && LA237_20 <= KW_INPUTFORMAT)||(LA237_20 >= KW_ISOLATION && LA237_20 <= KW_JAR)||(LA237_20 >= KW_KEY && LA237_20 <= KW_LAST)||LA237_20==KW_LEVEL||(LA237_20 >= KW_LIMIT && LA237_20 <= KW_LOAD)||(LA237_20 >= KW_LOCATION && LA237_20 <= KW_LONG)||LA237_20==KW_MANAGEMENT||(LA237_20 >= KW_MAPJOIN && LA237_20 <= KW_MATERIALIZED)||LA237_20==KW_METADATA||(LA237_20 >= KW_MINUTE && LA237_20 <= KW_MONTH)||(LA237_20 >= KW_MOVE && LA237_20 <= KW_MSCK)||(LA237_20 >= KW_NORELY && LA237_20 <= KW_NOSCAN)||LA237_20==KW_NOVALIDATE||LA237_20==KW_NULLS||LA237_20==KW_OFFSET||(LA237_20 >= KW_OPERATOR && LA237_20 <= KW_OPTION)||(LA237_20 >= KW_OUTPUTDRIVER && LA237_20 <= KW_OUTPUTFORMAT)||(LA237_20 >= KW_OVERWRITE && LA237_20 <= KW_OWNER)||(LA237_20 >= KW_PARTITIONED && LA237_20 <= KW_PATH)||(LA237_20 >= KW_PLAN && LA237_20 <= KW_POOL)||LA237_20==KW_PRINCIPALS||(LA237_20 >= KW_PURGE && LA237_20 <= KW_QUERY_PARALLELISM)||LA237_20==KW_READ||(LA237_20 >= KW_REBUILD && LA237_20 <= KW_RECORDWRITER)||(LA237_20 >= KW_RELOAD && LA237_20 <= KW_RESTRICT)||LA237_20==KW_REWRITE||(LA237_20 >= KW_ROLE && LA237_20 <= KW_ROLES)||(LA237_20 >= KW_SCHEDULING_POLICY && LA237_20 <= KW_SECOND)||(LA237_20 >= KW_SEMI && LA237_20 <= KW_SERVER)||(LA237_20 >= KW_SETS && LA237_20 <= KW_SKEWED)||(LA237_20 >= KW_SNAPSHOT && LA237_20 <= KW_SSL)||(LA237_20 >= KW_STATISTICS && LA237_20 <= KW_SUMMARY)||LA237_20==KW_TABLES||(LA237_20 >= KW_TBLPROPERTIES && LA237_20 <= KW_TERMINATED)||LA237_20==KW_TINYINT||(LA237_20 >= KW_TOUCH && LA237_20 <= KW_TRANSACTIONS)||LA237_20==KW_UNARCHIVE||LA237_20==KW_UNDO||LA237_20==KW_UNIONTYPE||(LA237_20 >= KW_UNLOCK && LA237_20 <= KW_UNSIGNED)||(LA237_20 >= KW_URI && LA237_20 <= KW_USE)||(LA237_20 >= KW_UTC && LA237_20 <= KW_VALIDATE)||LA237_20==KW_VALUE_TYPE||(LA237_20 >= KW_VECTORIZATION && LA237_20 <= KW_WEEK)||LA237_20==KW_WHILE||(LA237_20 >= KW_WORK && LA237_20 <= KW_ZONE)||LA237_20==KW_BATCH||LA237_20==KW_DAYOFWEEK||LA237_20==KW_HOLD_DDLTIME||LA237_20==KW_IGNORE||LA237_20==KW_NO_DROP||LA237_20==KW_OFFLINE||LA237_20==KW_PROTECTION||LA237_20==KW_READONLY||LA237_20==KW_TIMESTAMPTZ) ) {
						alt237=1;
					}

				}

				switch (alt237) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2145:23: COMMA columnNameType
					{
					COMMA802=(Token)match(input,COMMA,FOLLOW_COMMA_in_columnNameTypeList12853); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA802);

					pushFollow(FOLLOW_columnNameType_in_columnNameTypeList12855);
					columnNameType803=columnNameType();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_columnNameType.add(columnNameType803.getTree());
					}
					break;

				default :
					break loop237;
				}
			}

			// AST REWRITE
			// elements: columnNameType
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2145:46: -> ^( TOK_TABCOLLIST ( columnNameType )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2145:49: ^( TOK_TABCOLLIST ( columnNameType )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLLIST, "TOK_TABCOLLIST"), root_1);
				if ( !(stream_columnNameType.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_columnNameType.hasNext() ) {
					adaptor.addChild(root_1, stream_columnNameType.nextTree());
				}
				stream_columnNameType.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "columnNameTypeList"


	public static class columnNameTypeOrConstraintList_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnNameTypeOrConstraintList"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2148:1: columnNameTypeOrConstraintList : columnNameTypeOrConstraint ( COMMA columnNameTypeOrConstraint )* -> ^( TOK_TABCOLLIST ( columnNameTypeOrConstraint )+ ) ;
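	// Illustrative note (hand-added, not ANTLR output): like columnNameTypeList,
	// except each element may also be a table-level constraint, e.g.
	//     id INT, name STRING, PRIMARY KEY (id) DISABLE NOVALIDATE
	// (hypothetical columns and constraint); everything is gathered under one
	// ^(TOK_TABCOLLIST ...) node.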
	public final HiveParser.columnNameTypeOrConstraintList_return columnNameTypeOrConstraintList() throws RecognitionException {
		HiveParser.columnNameTypeOrConstraintList_return retval = new HiveParser.columnNameTypeOrConstraintList_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token COMMA805=null;
		ParserRuleReturnScope columnNameTypeOrConstraint804 =null;
		ParserRuleReturnScope columnNameTypeOrConstraint806 =null;

		ASTNode COMMA805_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleSubtreeStream stream_columnNameTypeOrConstraint=new RewriteRuleSubtreeStream(adaptor,"rule columnNameTypeOrConstraint");

		 pushMsg("column name type and constraints list", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2151:5: ( columnNameTypeOrConstraint ( COMMA columnNameTypeOrConstraint )* -> ^( TOK_TABCOLLIST ( columnNameTypeOrConstraint )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2151:7: columnNameTypeOrConstraint ( COMMA columnNameTypeOrConstraint )*
			{
			pushFollow(FOLLOW_columnNameTypeOrConstraint_in_columnNameTypeOrConstraintList12893);
			columnNameTypeOrConstraint804=columnNameTypeOrConstraint();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnNameTypeOrConstraint.add(columnNameTypeOrConstraint804.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2151:34: ( COMMA columnNameTypeOrConstraint )*
			loop238:
			while (true) {
				int alt238=2;
				int LA238_0 = input.LA(1);
				if ( (LA238_0==COMMA) ) {
					alt238=1;
				}

				switch (alt238) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2151:35: COMMA columnNameTypeOrConstraint
					{
					COMMA805=(Token)match(input,COMMA,FOLLOW_COMMA_in_columnNameTypeOrConstraintList12896); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA805);

					pushFollow(FOLLOW_columnNameTypeOrConstraint_in_columnNameTypeOrConstraintList12898);
					columnNameTypeOrConstraint806=columnNameTypeOrConstraint();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_columnNameTypeOrConstraint.add(columnNameTypeOrConstraint806.getTree());
					}
					break;

				default :
					break loop238;
				}
			}

			// AST REWRITE
			// elements: columnNameTypeOrConstraint
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2151:70: -> ^( TOK_TABCOLLIST ( columnNameTypeOrConstraint )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2151:73: ^( TOK_TABCOLLIST ( columnNameTypeOrConstraint )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLLIST, "TOK_TABCOLLIST"), root_1);
				if ( !(stream_columnNameTypeOrConstraint.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_columnNameTypeOrConstraint.hasNext() ) {
					adaptor.addChild(root_1, stream_columnNameTypeOrConstraint.nextTree());
				}
				stream_columnNameTypeOrConstraint.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "columnNameTypeOrConstraintList"


	public static class columnNameColonTypeList_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnNameColonTypeList"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2154:1: columnNameColonTypeList : columnNameColonType ( COMMA columnNameColonType )* -> ^( TOK_TABCOLLIST ( columnNameColonType )+ ) ;
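	// Illustrative note (hand-added, not ANTLR output): matches the colon-separated
	// name:type pairs used inside STRUCT type definitions, e.g.
	//     id:int, name:string
	// (hypothetical fields), again rewritten to ^(TOK_TABCOLLIST ...).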
	public final HiveParser.columnNameColonTypeList_return columnNameColonTypeList() throws RecognitionException {
		HiveParser.columnNameColonTypeList_return retval = new HiveParser.columnNameColonTypeList_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token COMMA808=null;
		ParserRuleReturnScope columnNameColonType807 =null;
		ParserRuleReturnScope columnNameColonType809 =null;

		ASTNode COMMA808_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleSubtreeStream stream_columnNameColonType=new RewriteRuleSubtreeStream(adaptor,"rule columnNameColonType");

		 pushMsg("column name type list", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2157:5: ( columnNameColonType ( COMMA columnNameColonType )* -> ^( TOK_TABCOLLIST ( columnNameColonType )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2157:7: columnNameColonType ( COMMA columnNameColonType )*
			{
			pushFollow(FOLLOW_columnNameColonType_in_columnNameColonTypeList12936);
			columnNameColonType807=columnNameColonType();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnNameColonType.add(columnNameColonType807.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2157:27: ( COMMA columnNameColonType )*
			loop239:
			while (true) {
				int alt239=2;
				int LA239_0 = input.LA(1);
				if ( (LA239_0==COMMA) ) {
					alt239=1;
				}

				switch (alt239) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2157:28: COMMA columnNameColonType
					{
					COMMA808=(Token)match(input,COMMA,FOLLOW_COMMA_in_columnNameColonTypeList12939); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA808);

					pushFollow(FOLLOW_columnNameColonType_in_columnNameColonTypeList12941);
					columnNameColonType809=columnNameColonType();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_columnNameColonType.add(columnNameColonType809.getTree());
					}
					break;

				default :
					break loop239;
				}
			}

			// AST REWRITE
			// elements: columnNameColonType
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2157:56: -> ^( TOK_TABCOLLIST ( columnNameColonType )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2157:59: ^( TOK_TABCOLLIST ( columnNameColonType )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLLIST, "TOK_TABCOLLIST"), root_1);
				if ( !(stream_columnNameColonType.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_columnNameColonType.hasNext() ) {
					adaptor.addChild(root_1, stream_columnNameColonType.nextTree());
				}
				stream_columnNameColonType.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "columnNameColonTypeList"


	public static class columnNameList_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnNameList"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2160:1: columnNameList : columnName ( COMMA columnName )* -> ^( TOK_TABCOLNAME ( columnName )+ ) ;
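	// Illustrative note (hand-added, not ANTLR output): matches a bare column
	// name list such as
	//     id, name
	// (hypothetical columns) and rewrites it to ^(TOK_TABCOLNAME ...).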
	public final HiveParser.columnNameList_return columnNameList() throws RecognitionException {
		HiveParser.columnNameList_return retval = new HiveParser.columnNameList_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token COMMA811=null;
		ParserRuleReturnScope columnName810 =null;
		ParserRuleReturnScope columnName812 =null;

		ASTNode COMMA811_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleSubtreeStream stream_columnName=new RewriteRuleSubtreeStream(adaptor,"rule columnName");

		 pushMsg("column name list", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2163:5: ( columnName ( COMMA columnName )* -> ^( TOK_TABCOLNAME ( columnName )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2163:7: columnName ( COMMA columnName )*
			{
			pushFollow(FOLLOW_columnName_in_columnNameList12979);
			columnName810=columnName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnName.add(columnName810.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2163:18: ( COMMA columnName )*
			loop240:
			while (true) {
				int alt240=2;
				int LA240_0 = input.LA(1);
				if ( (LA240_0==COMMA) ) {
					alt240=1;
				}

				switch (alt240) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2163:19: COMMA columnName
					{
					COMMA811=(Token)match(input,COMMA,FOLLOW_COMMA_in_columnNameList12982); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA811);

					pushFollow(FOLLOW_columnName_in_columnNameList12984);
					columnName812=columnName();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_columnName.add(columnName812.getTree());
					}
					break;

				default :
					break loop240;
				}
			}

			// AST REWRITE
			// elements: columnName
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2163:38: -> ^( TOK_TABCOLNAME ( columnName )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2163:41: ^( TOK_TABCOLNAME ( columnName )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLNAME, "TOK_TABCOLNAME"), root_1);
				if ( !(stream_columnName.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_columnName.hasNext() ) {
					adaptor.addChild(root_1, stream_columnName.nextTree());
				}
				stream_columnName.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "columnNameList"


	public static class columnName_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnName"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2166:1: columnName : identifier ;
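	// Illustrative note (hand-added, not ANTLR output): a column name is simply an
	// identifier (which may be a non-reserved keyword); no AST rewrite is applied,
	// so the identifier subtree passes through unchanged.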
	public final HiveParser.columnName_return columnName() throws RecognitionException {
		HiveParser.columnName_return retval = new HiveParser.columnName_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope identifier813 =null;


		 pushMsg("column name", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2169:5: ( identifier )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2170:7: identifier
			{
			root_0 = (ASTNode)adaptor.nil();


			pushFollow(FOLLOW_identifier_in_columnName13028);
			identifier813=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) adaptor.addChild(root_0, identifier813.getTree());

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "columnName"


	public static class extColumnName_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "extColumnName"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2173:1: extColumnName : identifier ( DOT ^ ( ( KW_ELEM_TYPE )=> KW_ELEM_TYPE | ( KW_KEY_TYPE )=> KW_KEY_TYPE | ( KW_VALUE_TYPE )=> KW_VALUE_TYPE | identifier ) )* ;
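	// Illustrative note (hand-added, not ANTLR output): matches dotted references
	// into complex-typed columns, e.g.
	//     addresses.$elem$  or  properties.$key$  or  properties.$value$
	// (hypothetical columns, assuming the lexer maps KW_ELEM_TYPE/KW_KEY_TYPE/
	// KW_VALUE_TYPE to the '$elem$'/'$key$'/'$value$' pseudo-fields). The DOT^
	// operator in the grammar makes each DOT token the root of its subtree.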
	public final HiveParser.extColumnName_return extColumnName() throws RecognitionException {
		HiveParser.extColumnName_return retval = new HiveParser.extColumnName_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token DOT815=null;
		Token KW_ELEM_TYPE816=null;
		Token KW_KEY_TYPE817=null;
		Token KW_VALUE_TYPE818=null;
		ParserRuleReturnScope identifier814 =null;
		ParserRuleReturnScope identifier819 =null;

		ASTNode DOT815_tree=null;
		ASTNode KW_ELEM_TYPE816_tree=null;
		ASTNode KW_KEY_TYPE817_tree=null;
		ASTNode KW_VALUE_TYPE818_tree=null;

		 pushMsg("column name for complex types", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2176:5: ( identifier ( DOT ^ ( ( KW_ELEM_TYPE )=> KW_ELEM_TYPE | ( KW_KEY_TYPE )=> KW_KEY_TYPE | ( KW_VALUE_TYPE )=> KW_VALUE_TYPE | identifier ) )* )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2177:7: identifier ( DOT ^ ( ( KW_ELEM_TYPE )=> KW_ELEM_TYPE | ( KW_KEY_TYPE )=> KW_KEY_TYPE | ( KW_VALUE_TYPE )=> KW_VALUE_TYPE | identifier ) )*
			{
			root_0 = (ASTNode)adaptor.nil();


			pushFollow(FOLLOW_identifier_in_extColumnName13061);
			identifier814=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) adaptor.addChild(root_0, identifier814.getTree());

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2177:18: ( DOT ^ ( ( KW_ELEM_TYPE )=> KW_ELEM_TYPE | ( KW_KEY_TYPE )=> KW_KEY_TYPE | ( KW_VALUE_TYPE )=> KW_VALUE_TYPE | identifier ) )*
			loop242:
			while (true) {
				int alt242=2;
				int LA242_0 = input.LA(1);
				if ( (LA242_0==DOT) ) {
					alt242=1;
				}

				switch (alt242) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2177:19: DOT ^ ( ( KW_ELEM_TYPE )=> KW_ELEM_TYPE | ( KW_KEY_TYPE )=> KW_KEY_TYPE | ( KW_VALUE_TYPE )=> KW_VALUE_TYPE | identifier )
					{
					DOT815=(Token)match(input,DOT,FOLLOW_DOT_in_extColumnName13064); if (state.failed) return retval;
					if ( state.backtracking==0 ) {
					DOT815_tree = (ASTNode)adaptor.create(DOT815);
					root_0 = (ASTNode)adaptor.becomeRoot(DOT815_tree, root_0);
					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2177:24: ( ( KW_ELEM_TYPE )=> KW_ELEM_TYPE | ( KW_KEY_TYPE )=> KW_KEY_TYPE | ( KW_VALUE_TYPE )=> KW_VALUE_TYPE | identifier )
					int alt241=4;
					switch ( input.LA(1) ) {
					case KW_ELEM_TYPE:
						{
						int LA241_1 = input.LA(2);
						if ( (synpred19_HiveParser()) ) {
							alt241=1;
						}
						else if ( (true) ) {
							alt241=4;
						}

						}
						break;
					case KW_KEY_TYPE:
						{
						int LA241_2 = input.LA(2);
						if ( (synpred20_HiveParser()) ) {
							alt241=2;
						}
						else if ( (true) ) {
							alt241=4;
						}

						}
						break;
					case KW_VALUE_TYPE:
						{
						int LA241_3 = input.LA(2);
						if ( (synpred21_HiveParser()) ) {
							alt241=3;
						}
						else if ( (true) ) {
							alt241=4;
						}

						}
						break;
					case Identifier:
					case KW_ABORT:
					case KW_ACTIVATE:
					case KW_ACTIVE:
					case KW_ADD:
					case KW_ADMIN:
					case KW_AFTER:
					case KW_ALLOC_FRACTION:
					case KW_ANALYZE:
					case KW_ARCHIVE:
					case KW_ASC:
					case KW_AUTOCOMMIT:
					case KW_BEFORE:
					case KW_BUCKET:
					case KW_BUCKETS:
					case KW_CACHE:
					case KW_CASCADE:
					case KW_CHANGE:
					case KW_CHECK:
					case KW_CLUSTER:
					case KW_CLUSTERED:
					case KW_CLUSTERSTATUS:
					case KW_COLLECTION:
					case KW_COLUMNS:
					case KW_COMMENT:
					case KW_COMPACT:
					case KW_COMPACTIONS:
					case KW_COMPUTE:
					case KW_CONCATENATE:
					case KW_CONTINUE:
					case KW_DATA:
					case KW_DATABASES:
					case KW_DATETIME:
					case KW_DAY:
					case KW_DBPROPERTIES:
					case KW_DEFAULT:
					case KW_DEFERRED:
					case KW_DEFINED:
					case KW_DELIMITED:
					case KW_DEPENDENCY:
					case KW_DESC:
					case KW_DETAIL:
					case KW_DIRECTORIES:
					case KW_DIRECTORY:
					case KW_DISABLE:
					case KW_DISTRIBUTE:
					case KW_DO:
					case KW_DOW:
					case KW_DUMP:
					case KW_ENABLE:
					case KW_ENFORCED:
					case KW_ESCAPED:
					case KW_EXCLUSIVE:
					case KW_EXPLAIN:
					case KW_EXPORT:
					case KW_EXPRESSION:
					case KW_FIELDS:
					case KW_FILE:
					case KW_FILEFORMAT:
					case KW_FIRST:
					case KW_FORMAT:
					case KW_FORMATTED:
					case KW_FUNCTIONS:
					case KW_HOUR:
					case KW_IDXPROPERTIES:
					case KW_INDEX:
					case KW_INDEXES:
					case KW_INPATH:
					case KW_INPUTDRIVER:
					case KW_INPUTFORMAT:
					case KW_ISOLATION:
					case KW_ITEMS:
					case KW_JAR:
					case KW_KEY:
					case KW_KEYS:
					case KW_KILL:
					case KW_LAST:
					case KW_LEVEL:
					case KW_LIMIT:
					case KW_LINES:
					case KW_LOAD:
					case KW_LOCATION:
					case KW_LOCK:
					case KW_LOCKS:
					case KW_LOGICAL:
					case KW_LONG:
					case KW_MANAGEMENT:
					case KW_MAPJOIN:
					case KW_MAPPING:
					case KW_MATCHED:
					case KW_MATERIALIZED:
					case KW_METADATA:
					case KW_MINUTE:
					case KW_MONTH:
					case KW_MOVE:
					case KW_MSCK:
					case KW_NORELY:
					case KW_NOSCAN:
					case KW_NOVALIDATE:
					case KW_NULLS:
					case KW_OFFSET:
					case KW_OPERATOR:
					case KW_OPTION:
					case KW_OUTPUTDRIVER:
					case KW_OUTPUTFORMAT:
					case KW_OVERWRITE:
					case KW_OWNER:
					case KW_PARTITIONED:
					case KW_PARTITIONS:
					case KW_PATH:
					case KW_PLAN:
					case KW_PLANS:
					case KW_PLUS:
					case KW_POOL:
					case KW_PRINCIPALS:
					case KW_PURGE:
					case KW_QUARTER:
					case KW_QUERY:
					case KW_QUERY_PARALLELISM:
					case KW_READ:
					case KW_REBUILD:
					case KW_RECORDREADER:
					case KW_RECORDWRITER:
					case KW_RELOAD:
					case KW_RELY:
					case KW_RENAME:
					case KW_REOPTIMIZATION:
					case KW_REPAIR:
					case KW_REPL:
					case KW_REPLACE:
					case KW_REPLICATION:
					case KW_RESOURCE:
					case KW_RESTRICT:
					case KW_REWRITE:
					case KW_ROLE:
					case KW_ROLES:
					case KW_SCHEDULING_POLICY:
					case KW_SCHEMA:
					case KW_SCHEMAS:
					case KW_SECOND:
					case KW_SEMI:
					case KW_SERDE:
					case KW_SERDEPROPERTIES:
					case KW_SERVER:
					case KW_SETS:
					case KW_SHARED:
					case KW_SHOW:
					case KW_SHOW_DATABASE:
					case KW_SKEWED:
					case KW_SNAPSHOT:
					case KW_SORT:
					case KW_SORTED:
					case KW_SSL:
					case KW_STATISTICS:
					case KW_STATUS:
					case KW_STORED:
					case KW_STREAMTABLE:
					case KW_STRING:
					case KW_STRUCT:
					case KW_SUMMARY:
					case KW_TABLES:
					case KW_TBLPROPERTIES:
					case KW_TEMPORARY:
					case KW_TERMINATED:
					case KW_TINYINT:
					case KW_TOUCH:
					case KW_TRANSACTION:
					case KW_TRANSACTIONS:
					case KW_UNARCHIVE:
					case KW_UNDO:
					case KW_UNIONTYPE:
					case KW_UNLOCK:
					case KW_UNMANAGED:
					case KW_UNSET:
					case KW_UNSIGNED:
					case KW_URI:
					case KW_USE:
					case KW_UTC:
					case KW_UTCTIMESTAMP:
					case KW_VALIDATE:
					case KW_VECTORIZATION:
					case KW_VIEW:
					case KW_VIEWS:
					case KW_WAIT:
					case KW_WEEK:
					case KW_WHILE:
					case KW_WORK:
					case KW_WORKLOAD:
					case KW_WRITE:
					case KW_YEAR:
					case KW_ZONE:
					case KW_BATCH:
					case KW_DAYOFWEEK:
					case KW_HOLD_DDLTIME:
					case KW_IGNORE:
					case KW_NO_DROP:
					case KW_OFFLINE:
					case KW_PROTECTION:
					case KW_READONLY:
					case KW_TIMESTAMPTZ:
						{
						alt241=4;
						}
						break;
					default:
						if (state.backtracking>0) {state.failed=true; return retval;}
						NoViableAltException nvae =
							new NoViableAltException("", 241, 0, input);
						throw nvae;
					}
					switch (alt241) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2177:25: ( KW_ELEM_TYPE )=> KW_ELEM_TYPE
							{
							KW_ELEM_TYPE816=(Token)match(input,KW_ELEM_TYPE,FOLLOW_KW_ELEM_TYPE_in_extColumnName13074); if (state.failed) return retval;
							if ( state.backtracking==0 ) {
							KW_ELEM_TYPE816_tree = (ASTNode)adaptor.create(KW_ELEM_TYPE816);
							adaptor.addChild(root_0, KW_ELEM_TYPE816_tree);
							}

							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2177:58: ( KW_KEY_TYPE )=> KW_KEY_TYPE
							{
							KW_KEY_TYPE817=(Token)match(input,KW_KEY_TYPE,FOLLOW_KW_KEY_TYPE_in_extColumnName13084); if (state.failed) return retval;
							if ( state.backtracking==0 ) {
							KW_KEY_TYPE817_tree = (ASTNode)adaptor.create(KW_KEY_TYPE817);
							adaptor.addChild(root_0, KW_KEY_TYPE817_tree);
							}

							}
							break;
						case 3 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2177:89: ( KW_VALUE_TYPE )=> KW_VALUE_TYPE
							{
							KW_VALUE_TYPE818=(Token)match(input,KW_VALUE_TYPE,FOLLOW_KW_VALUE_TYPE_in_extColumnName13094); if (state.failed) return retval;
							if ( state.backtracking==0 ) {
							KW_VALUE_TYPE818_tree = (ASTNode)adaptor.create(KW_VALUE_TYPE818);
							adaptor.addChild(root_0, KW_VALUE_TYPE818_tree);
							}

							}
							break;
						case 4 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2177:124: identifier
							{
							pushFollow(FOLLOW_identifier_in_extColumnName13098);
							identifier819=identifier();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) adaptor.addChild(root_0, identifier819.getTree());

							}
							break;

					}

					}
					break;

				default :
					break loop242;
				}
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "extColumnName"


	public static class columnNameOrderList_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnNameOrderList"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2180:1: columnNameOrderList : columnNameOrder ( COMMA columnNameOrder )* -> ^( TOK_TABCOLNAME ( columnNameOrder )+ ) ;
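	// Parses one or more columnNameOrder entries (a column name with optional
	// ordering) separated by commas and groups them under a single TOK_TABCOLNAME
	// node. Illustrative HiveQL (an assumption, not taken from the grammar file):
	//   ... CLUSTERED BY (userid) SORTED BY (viewTime ASC, page DESC) INTO 32 BUCKETS;
	// where "viewTime ASC, page DESC" is the columnNameOrderList.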
	public final HiveParser.columnNameOrderList_return columnNameOrderList() throws RecognitionException {
		HiveParser.columnNameOrderList_return retval = new HiveParser.columnNameOrderList_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token COMMA821=null;
		ParserRuleReturnScope columnNameOrder820 =null;
		ParserRuleReturnScope columnNameOrder822 =null;

		ASTNode COMMA821_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleSubtreeStream stream_columnNameOrder=new RewriteRuleSubtreeStream(adaptor,"rule columnNameOrder");

		 pushMsg("column name order list", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2183:5: ( columnNameOrder ( COMMA columnNameOrder )* -> ^( TOK_TABCOLNAME ( columnNameOrder )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2183:7: columnNameOrder ( COMMA columnNameOrder )*
			{
			pushFollow(FOLLOW_columnNameOrder_in_columnNameOrderList13128);
			columnNameOrder820=columnNameOrder();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnNameOrder.add(columnNameOrder820.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2183:23: ( COMMA columnNameOrder )*
			loop243:
			while (true) {
				int alt243=2;
				int LA243_0 = input.LA(1);
				if ( (LA243_0==COMMA) ) {
					alt243=1;
				}

				switch (alt243) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2183:24: COMMA columnNameOrder
					{
					COMMA821=(Token)match(input,COMMA,FOLLOW_COMMA_in_columnNameOrderList13131); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA821);

					pushFollow(FOLLOW_columnNameOrder_in_columnNameOrderList13133);
					columnNameOrder822=columnNameOrder();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_columnNameOrder.add(columnNameOrder822.getTree());
					}
					break;

				default :
					break loop243;
				}
			}

			// AST REWRITE
			// elements: columnNameOrder
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2183:48: -> ^( TOK_TABCOLNAME ( columnNameOrder )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2183:51: ^( TOK_TABCOLNAME ( columnNameOrder )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLNAME, "TOK_TABCOLNAME"), root_1);
				if ( !(stream_columnNameOrder.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_columnNameOrder.hasNext() ) {
					adaptor.addChild(root_1, stream_columnNameOrder.nextTree());
				}
				stream_columnNameOrder.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "columnNameOrderList"


	public static class columnParenthesesList_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnParenthesesList"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2186:1: columnParenthesesList : LPAREN ! columnNameList RPAREN !;
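	// The "!" suffixes in the grammar suppress LPAREN/RPAREN from the AST, so only
	// the columnNameList subtree is kept. An illustrative fragment (assumption):
	// the "(id, name)" portion of PRIMARY KEY (id, name).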
	public final HiveParser.columnParenthesesList_return columnParenthesesList() throws RecognitionException {
		HiveParser.columnParenthesesList_return retval = new HiveParser.columnParenthesesList_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token LPAREN823=null;
		Token RPAREN825=null;
		ParserRuleReturnScope columnNameList824 =null;

		ASTNode LPAREN823_tree=null;
		ASTNode RPAREN825_tree=null;

		 pushMsg("column parentheses list", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2189:5: ( LPAREN ! columnNameList RPAREN !)
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2189:7: LPAREN ! columnNameList RPAREN !
			{
			root_0 = (ASTNode)adaptor.nil();


			LPAREN823=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_columnParenthesesList13171); if (state.failed) return retval;
			pushFollow(FOLLOW_columnNameList_in_columnParenthesesList13174);
			columnNameList824=columnNameList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) adaptor.addChild(root_0, columnNameList824.getTree());

			RPAREN825=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_columnParenthesesList13176); if (state.failed) return retval;
			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "columnParenthesesList"


	public static class enableValidateSpecification_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "enableValidateSpecification"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2192:1: enableValidateSpecification : ( enableSpecification ( validateSpecification )? | enforcedSpecification );
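	// Accepts either the ENABLE/DISABLE form with an optional VALIDATE/NOVALIDATE
	// suffix, or the SQL-standard [NOT] ENFORCED spelling. Illustrative constraint
	// tails this rule accepts (assumptions; later phases may restrict which
	// combinations are legal): "DISABLE NOVALIDATE", "NOT ENFORCED".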
	public final HiveParser.enableValidateSpecification_return enableValidateSpecification() throws RecognitionException {
		HiveParser.enableValidateSpecification_return retval = new HiveParser.enableValidateSpecification_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope enableSpecification826 =null;
		ParserRuleReturnScope validateSpecification827 =null;
		ParserRuleReturnScope enforcedSpecification828 =null;


		 pushMsg("enable specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2195:5: ( enableSpecification ( validateSpecification )? | enforcedSpecification )
			int alt245=2;
			int LA245_0 = input.LA(1);
			if ( (LA245_0==KW_DISABLE||LA245_0==KW_ENABLE) ) {
				alt245=1;
			}
			else if ( (LA245_0==KW_ENFORCED||LA245_0==KW_NOT) ) {
				alt245=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 245, 0, input);
				throw nvae;
			}

			switch (alt245) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2195:7: enableSpecification ( validateSpecification )?
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_enableSpecification_in_enableValidateSpecification13204);
					enableSpecification826=enableSpecification();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, enableSpecification826.getTree());

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2195:27: ( validateSpecification )?
					int alt244=2;
					int LA244_0 = input.LA(1);
					if ( (LA244_0==KW_NOVALIDATE||LA244_0==KW_VALIDATE) ) {
						alt244=1;
					}
					switch (alt244) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2195:27: validateSpecification
							{
							pushFollow(FOLLOW_validateSpecification_in_enableValidateSpecification13206);
							validateSpecification827=validateSpecification();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) adaptor.addChild(root_0, validateSpecification827.getTree());

							}
							break;

					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2196:7: enforcedSpecification
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_enforcedSpecification_in_enableValidateSpecification13215);
					enforcedSpecification828=enforcedSpecification();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, enforcedSpecification828.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "enableValidateSpecification"


	public static class enableSpecification_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "enableSpecification"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2199:1: enableSpecification : ( KW_ENABLE -> ^( TOK_ENABLE ) | KW_DISABLE -> ^( TOK_DISABLE ) );
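	// Each keyword rewrites to a bare marker node (TOK_ENABLE or TOK_DISABLE); the
	// keyword token itself is dropped from the AST.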
	public final HiveParser.enableSpecification_return enableSpecification() throws RecognitionException {
		HiveParser.enableSpecification_return retval = new HiveParser.enableSpecification_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ENABLE829=null;
		Token KW_DISABLE830=null;

		ASTNode KW_ENABLE829_tree=null;
		ASTNode KW_DISABLE830_tree=null;
		RewriteRuleTokenStream stream_KW_DISABLE=new RewriteRuleTokenStream(adaptor,"token KW_DISABLE");
		RewriteRuleTokenStream stream_KW_ENABLE=new RewriteRuleTokenStream(adaptor,"token KW_ENABLE");

		 pushMsg("enable specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2202:5: ( KW_ENABLE -> ^( TOK_ENABLE ) | KW_DISABLE -> ^( TOK_DISABLE ) )
			int alt246=2;
			int LA246_0 = input.LA(1);
			if ( (LA246_0==KW_ENABLE) ) {
				alt246=1;
			}
			else if ( (LA246_0==KW_DISABLE) ) {
				alt246=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 246, 0, input);
				throw nvae;
			}

			switch (alt246) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2202:7: KW_ENABLE
					{
					KW_ENABLE829=(Token)match(input,KW_ENABLE,FOLLOW_KW_ENABLE_in_enableSpecification13242); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ENABLE.add(KW_ENABLE829);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2202:17: -> ^( TOK_ENABLE )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2202:20: ^( TOK_ENABLE )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ENABLE, "TOK_ENABLE"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2203:7: KW_DISABLE
					{
					KW_DISABLE830=(Token)match(input,KW_DISABLE,FOLLOW_KW_DISABLE_in_enableSpecification13256); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DISABLE.add(KW_DISABLE830);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2203:18: -> ^( TOK_DISABLE )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2203:21: ^( TOK_DISABLE )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DISABLE, "TOK_DISABLE"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "enableSpecification"


	public static class validateSpecification_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "validateSpecification"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2206:1: validateSpecification : ( KW_VALIDATE -> ^( TOK_VALIDATE ) | KW_NOVALIDATE -> ^( TOK_NOVALIDATE ) );
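	// Mirrors enableSpecification: each keyword rewrites to a bare TOK_VALIDATE or
	// TOK_NOVALIDATE marker node.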
	public final HiveParser.validateSpecification_return validateSpecification() throws RecognitionException {
		HiveParser.validateSpecification_return retval = new HiveParser.validateSpecification_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_VALIDATE831=null;
		Token KW_NOVALIDATE832=null;

		ASTNode KW_VALIDATE831_tree=null;
		ASTNode KW_NOVALIDATE832_tree=null;
		RewriteRuleTokenStream stream_KW_VALIDATE=new RewriteRuleTokenStream(adaptor,"token KW_VALIDATE");
		RewriteRuleTokenStream stream_KW_NOVALIDATE=new RewriteRuleTokenStream(adaptor,"token KW_NOVALIDATE");

		 pushMsg("validate specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2209:5: ( KW_VALIDATE -> ^( TOK_VALIDATE ) | KW_NOVALIDATE -> ^( TOK_NOVALIDATE ) )
			int alt247=2;
			int LA247_0 = input.LA(1);
			if ( (LA247_0==KW_VALIDATE) ) {
				alt247=1;
			}
			else if ( (LA247_0==KW_NOVALIDATE) ) {
				alt247=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 247, 0, input);
				throw nvae;
			}

			switch (alt247) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2209:7: KW_VALIDATE
					{
					KW_VALIDATE831=(Token)match(input,KW_VALIDATE,FOLLOW_KW_VALIDATE_in_validateSpecification13289); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_VALIDATE.add(KW_VALIDATE831);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2209:19: -> ^( TOK_VALIDATE )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2209:22: ^( TOK_VALIDATE )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_VALIDATE, "TOK_VALIDATE"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2210:7: KW_NOVALIDATE
					{
					KW_NOVALIDATE832=(Token)match(input,KW_NOVALIDATE,FOLLOW_KW_NOVALIDATE_in_validateSpecification13303); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_NOVALIDATE.add(KW_NOVALIDATE832);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2210:21: -> ^( TOK_NOVALIDATE )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2210:24: ^( TOK_NOVALIDATE )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_NOVALIDATE, "TOK_NOVALIDATE"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "validateSpecification"


	public static class enforcedSpecification_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "enforcedSpecification"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2213:1: enforcedSpecification : ( KW_ENFORCED -> ^( TOK_ENABLE ) | KW_NOT KW_ENFORCED -> ^( TOK_DISABLE ) );
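	// Note the deliberate mapping: ENFORCED rewrites to TOK_ENABLE and NOT ENFORCED
	// to TOK_DISABLE, so the ANSI-style spelling shares one AST shape with the
	// ENABLE/DISABLE form and downstream analysis treats both identically.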
	public final HiveParser.enforcedSpecification_return enforcedSpecification() throws RecognitionException {
		HiveParser.enforcedSpecification_return retval = new HiveParser.enforcedSpecification_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ENFORCED833=null;
		Token KW_NOT834=null;
		Token KW_ENFORCED835=null;

		ASTNode KW_ENFORCED833_tree=null;
		ASTNode KW_NOT834_tree=null;
		ASTNode KW_ENFORCED835_tree=null;
		RewriteRuleTokenStream stream_KW_ENFORCED=new RewriteRuleTokenStream(adaptor,"token KW_ENFORCED");
		RewriteRuleTokenStream stream_KW_NOT=new RewriteRuleTokenStream(adaptor,"token KW_NOT");

		 pushMsg("enforced specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2216:5: ( KW_ENFORCED -> ^( TOK_ENABLE ) | KW_NOT KW_ENFORCED -> ^( TOK_DISABLE ) )
			int alt248=2;
			int LA248_0 = input.LA(1);
			if ( (LA248_0==KW_ENFORCED) ) {
				alt248=1;
			}
			else if ( (LA248_0==KW_NOT) ) {
				alt248=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 248, 0, input);
				throw nvae;
			}

			switch (alt248) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2216:7: KW_ENFORCED
					{
					KW_ENFORCED833=(Token)match(input,KW_ENFORCED,FOLLOW_KW_ENFORCED_in_enforcedSpecification13336); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ENFORCED.add(KW_ENFORCED833);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2216:19: -> ^( TOK_ENABLE )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2216:22: ^( TOK_ENABLE )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ENABLE, "TOK_ENABLE"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2217:7: KW_NOT KW_ENFORCED
					{
					KW_NOT834=(Token)match(input,KW_NOT,FOLLOW_KW_NOT_in_enforcedSpecification13350); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_NOT.add(KW_NOT834);

					KW_ENFORCED835=(Token)match(input,KW_ENFORCED,FOLLOW_KW_ENFORCED_in_enforcedSpecification13352); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ENFORCED.add(KW_ENFORCED835);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2217:26: -> ^( TOK_DISABLE )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2217:29: ^( TOK_DISABLE )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DISABLE, "TOK_DISABLE"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "enforcedSpecification"


	public static class relySpecification_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "relySpecification"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2220:1: relySpecification : ( KW_RELY -> ^( TOK_RELY ) | ( KW_NORELY )? -> ^( TOK_NORELY ) );
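	// RELY rewrites to TOK_RELY. In the second alternative NORELY is optional but
	// the rewrite always emits TOK_NORELY, so omitting the keyword entirely is
	// equivalent to writing NORELY (the default).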
	public final HiveParser.relySpecification_return relySpecification() throws RecognitionException {
		HiveParser.relySpecification_return retval = new HiveParser.relySpecification_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_RELY836=null;
		Token KW_NORELY837=null;

		ASTNode KW_RELY836_tree=null;
		ASTNode KW_NORELY837_tree=null;
		RewriteRuleTokenStream stream_KW_NORELY=new RewriteRuleTokenStream(adaptor,"token KW_NORELY");
		RewriteRuleTokenStream stream_KW_RELY=new RewriteRuleTokenStream(adaptor,"token KW_RELY");

		 pushMsg("rely specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2223:5: ( KW_RELY -> ^( TOK_RELY ) | ( KW_NORELY )? -> ^( TOK_NORELY ) )
			int alt250=2;
			int LA250_0 = input.LA(1);
			if ( (LA250_0==KW_RELY) ) {
				alt250=1;
			}
			else if ( (LA250_0==EOF||LA250_0==COMMA||LA250_0==KW_AFTER||LA250_0==KW_CASCADE||LA250_0==KW_COMMENT||LA250_0==KW_FIRST||LA250_0==KW_NORELY||LA250_0==KW_RESTRICT||LA250_0==RPAREN) ) {
				alt250=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 250, 0, input);
				throw nvae;
			}

			switch (alt250) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2223:8: KW_RELY
					{
					KW_RELY836=(Token)match(input,KW_RELY,FOLLOW_KW_RELY_in_relySpecification13386); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_RELY.add(KW_RELY836);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2223:16: -> ^( TOK_RELY )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2223:19: ^( TOK_RELY )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_RELY, "TOK_RELY"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2224:8: ( KW_NORELY )?
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2224:8: ( KW_NORELY )?
					int alt249=2;
					int LA249_0 = input.LA(1);
					if ( (LA249_0==KW_NORELY) ) {
						alt249=1;
					}
					switch (alt249) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2224:9: KW_NORELY
							{
							KW_NORELY837=(Token)match(input,KW_NORELY,FOLLOW_KW_NORELY_in_relySpecification13402); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_NORELY.add(KW_NORELY837);

							}
							break;

					}

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2224:21: -> ^( TOK_NORELY )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2224:24: ^( TOK_NORELY )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_NORELY, "TOK_NORELY"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "relySpecification"


	public static class createConstraint_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "createConstraint"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2227:1: createConstraint : ( KW_CONSTRAINT constraintName= identifier )? tableLevelConstraint ( constraintOptsCreate )? -> {$constraintName.tree != null}? ^( ^( TOK_CONSTRAINT_NAME $constraintName) ( constraintOptsCreate )? ) -> ^( ( constraintOptsCreate )? ) ;
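	// Optional constraint name, then a PK/UNIQUE/CHECK body, then optional options.
	// The conditional rewrite keeps the tableLevelConstraint node as the subtree
	// root and attaches TOK_CONSTRAINT_NAME only when a name was supplied.
	// Illustrative HiveQL (an assumption, not taken from the grammar file):
	//   CONSTRAINT pk1 PRIMARY KEY (id) DISABLE NOVALIDATE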
	public final HiveParser.createConstraint_return createConstraint() throws RecognitionException {
		HiveParser.createConstraint_return retval = new HiveParser.createConstraint_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_CONSTRAINT838=null;
		ParserRuleReturnScope constraintName =null;
		ParserRuleReturnScope tableLevelConstraint839 =null;
		ParserRuleReturnScope constraintOptsCreate840 =null;

		ASTNode KW_CONSTRAINT838_tree=null;
		RewriteRuleTokenStream stream_KW_CONSTRAINT=new RewriteRuleTokenStream(adaptor,"token KW_CONSTRAINT");
		RewriteRuleSubtreeStream stream_constraintOptsCreate=new RewriteRuleSubtreeStream(adaptor,"rule constraintOptsCreate");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_tableLevelConstraint=new RewriteRuleSubtreeStream(adaptor,"rule tableLevelConstraint");

		 pushMsg("pk or uk or nn constraint", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2230:5: ( ( KW_CONSTRAINT constraintName= identifier )? tableLevelConstraint ( constraintOptsCreate )? -> {$constraintName.tree != null}? ^( ^( TOK_CONSTRAINT_NAME $constraintName) ( constraintOptsCreate )? ) -> ^( ( constraintOptsCreate )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2230:7: ( KW_CONSTRAINT constraintName= identifier )? tableLevelConstraint ( constraintOptsCreate )?
			{
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2230:7: ( KW_CONSTRAINT constraintName= identifier )?
			int alt251=2;
			int LA251_0 = input.LA(1);
			if ( (LA251_0==KW_CONSTRAINT) ) {
				alt251=1;
			}
			switch (alt251) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2230:8: KW_CONSTRAINT constraintName= identifier
					{
					KW_CONSTRAINT838=(Token)match(input,KW_CONSTRAINT,FOLLOW_KW_CONSTRAINT_in_createConstraint13438); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_CONSTRAINT.add(KW_CONSTRAINT838);

					pushFollow(FOLLOW_identifier_in_createConstraint13442);
					constraintName=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(constraintName.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_tableLevelConstraint_in_createConstraint13446);
			tableLevelConstraint839=tableLevelConstraint();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableLevelConstraint.add(tableLevelConstraint839.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2230:71: ( constraintOptsCreate )?
			int alt252=2;
			int LA252_0 = input.LA(1);
			if ( (LA252_0==KW_DISABLE||LA252_0==KW_ENABLE||LA252_0==KW_ENFORCED||LA252_0==KW_NOT) ) {
				alt252=1;
			}
			switch (alt252) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2230:71: constraintOptsCreate
					{
					pushFollow(FOLLOW_constraintOptsCreate_in_createConstraint13448);
					constraintOptsCreate840=constraintOptsCreate();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_constraintOptsCreate.add(constraintOptsCreate840.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: constraintOptsCreate, constraintName, constraintOptsCreate
			// token labels: 
			// rule labels: constraintName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_constraintName=new RewriteRuleSubtreeStream(adaptor,"rule constraintName",constraintName!=null?constraintName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2231:5: -> {$constraintName.tree != null}? ^( ^( TOK_CONSTRAINT_NAME $constraintName) ( constraintOptsCreate )? )
			if ((constraintName!=null?((ASTNode)constraintName.getTree()):null) != null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2232:13: ^( ^( TOK_CONSTRAINT_NAME $constraintName) ( constraintOptsCreate )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((tableLevelConstraint839!=null?((ASTNode)tableLevelConstraint839.getTree()):null), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2232:44: ^( TOK_CONSTRAINT_NAME $constraintName)
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CONSTRAINT_NAME, "TOK_CONSTRAINT_NAME"), root_2);
				adaptor.addChild(root_2, stream_constraintName.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2232:83: ( constraintOptsCreate )?
				if ( stream_constraintOptsCreate.hasNext() ) {
					adaptor.addChild(root_1, stream_constraintOptsCreate.nextTree());
				}
				stream_constraintOptsCreate.reset();

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2233:5: -> ^( ( constraintOptsCreate )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2233:8: ^( ( constraintOptsCreate )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((tableLevelConstraint839!=null?((ASTNode)tableLevelConstraint839.getTree()):null), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2233:39: ( constraintOptsCreate )?
				if ( stream_constraintOptsCreate.hasNext() ) {
					adaptor.addChild(root_1, stream_constraintOptsCreate.nextTree());
				}
				stream_constraintOptsCreate.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "createConstraint"


	public static class alterConstraintWithName_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterConstraintWithName"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2236:1: alterConstraintWithName : KW_CONSTRAINT constraintName= identifier tableLevelConstraint ( constraintOptsAlter )? -> ^( ^( TOK_CONSTRAINT_NAME $constraintName) ( constraintOptsAlter )? ) ;
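	// Same shape as createConstraint, but here the CONSTRAINT name is mandatory,
	// e.g. in an illustrative statement (assumption):
	//   ALTER TABLE t ADD CONSTRAINT uk1 UNIQUE (email) DISABLE NOVALIDATE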
	public final HiveParser.alterConstraintWithName_return alterConstraintWithName() throws RecognitionException {
		HiveParser.alterConstraintWithName_return retval = new HiveParser.alterConstraintWithName_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_CONSTRAINT841=null;
		ParserRuleReturnScope constraintName =null;
		ParserRuleReturnScope tableLevelConstraint842 =null;
		ParserRuleReturnScope constraintOptsAlter843 =null;

		ASTNode KW_CONSTRAINT841_tree=null;
		RewriteRuleTokenStream stream_KW_CONSTRAINT=new RewriteRuleTokenStream(adaptor,"token KW_CONSTRAINT");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_tableLevelConstraint=new RewriteRuleSubtreeStream(adaptor,"rule tableLevelConstraint");
		RewriteRuleSubtreeStream stream_constraintOptsAlter=new RewriteRuleSubtreeStream(adaptor,"rule constraintOptsAlter");

		 pushMsg("pk or uk or nn constraint with name", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2239:5: ( KW_CONSTRAINT constraintName= identifier tableLevelConstraint ( constraintOptsAlter )? -> ^( ^( TOK_CONSTRAINT_NAME $constraintName) ( constraintOptsAlter )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2239:7: KW_CONSTRAINT constraintName= identifier tableLevelConstraint ( constraintOptsAlter )?
			{
			KW_CONSTRAINT841=(Token)match(input,KW_CONSTRAINT,FOLLOW_KW_CONSTRAINT_in_alterConstraintWithName13523); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_CONSTRAINT.add(KW_CONSTRAINT841);

			pushFollow(FOLLOW_identifier_in_alterConstraintWithName13527);
			constraintName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(constraintName.getTree());
			pushFollow(FOLLOW_tableLevelConstraint_in_alterConstraintWithName13529);
			tableLevelConstraint842=tableLevelConstraint();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableLevelConstraint.add(tableLevelConstraint842.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2239:68: ( constraintOptsAlter )?
			int alt253=2;
			int LA253_0 = input.LA(1);
			if ( (LA253_0==KW_DISABLE||LA253_0==KW_ENABLE||LA253_0==KW_ENFORCED||LA253_0==KW_NOT) ) {
				alt253=1;
			}
			switch (alt253) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2239:68: constraintOptsAlter
					{
					pushFollow(FOLLOW_constraintOptsAlter_in_alterConstraintWithName13531);
					constraintOptsAlter843=constraintOptsAlter();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_constraintOptsAlter.add(constraintOptsAlter843.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: constraintOptsAlter, constraintName
			// token labels: 
			// rule labels: constraintName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_constraintName=new RewriteRuleSubtreeStream(adaptor,"rule constraintName",constraintName!=null?constraintName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2240:5: -> ^( ^( TOK_CONSTRAINT_NAME $constraintName) ( constraintOptsAlter )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2240:7: ^( ^( TOK_CONSTRAINT_NAME $constraintName) ( constraintOptsAlter )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((tableLevelConstraint842!=null?((ASTNode)tableLevelConstraint842.getTree()):null), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2240:38: ^( TOK_CONSTRAINT_NAME $constraintName)
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CONSTRAINT_NAME, "TOK_CONSTRAINT_NAME"), root_2);
				adaptor.addChild(root_2, stream_constraintName.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2240:77: ( constraintOptsAlter )?
				if ( stream_constraintOptsAlter.hasNext() ) {
					adaptor.addChild(root_1, stream_constraintOptsAlter.nextTree());
				}
				stream_constraintOptsAlter.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "alterConstraintWithName"


	public static class tableLevelConstraint_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tableLevelConstraint"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2243:1: tableLevelConstraint : ( pkUkConstraint | checkConstraint );
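	// Pure dispatch: PRIMARY/UNIQUE routes to pkUkConstraint and CHECK to
	// checkConstraint; this rule adds no AST rewrite of its own.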
	public final HiveParser.tableLevelConstraint_return tableLevelConstraint() throws RecognitionException {
		HiveParser.tableLevelConstraint_return retval = new HiveParser.tableLevelConstraint_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope pkUkConstraint844 =null;
		ParserRuleReturnScope checkConstraint845 =null;


		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2244:5: ( pkUkConstraint | checkConstraint )
			int alt254=2;
			int LA254_0 = input.LA(1);
			if ( (LA254_0==KW_PRIMARY||LA254_0==KW_UNIQUE) ) {
				alt254=1;
			}
			else if ( (LA254_0==KW_CHECK) ) {
				alt254=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 254, 0, input);
				throw nvae;
			}

			switch (alt254) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2244:7: pkUkConstraint
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_pkUkConstraint_in_tableLevelConstraint13568);
					pkUkConstraint844=pkUkConstraint();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, pkUkConstraint844.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2245:7: checkConstraint
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_checkConstraint_in_tableLevelConstraint13576);
					checkConstraint845=checkConstraint();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, checkConstraint845.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "tableLevelConstraint"


	public static class pkUkConstraint_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "pkUkConstraint"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2248:1: pkUkConstraint : tableConstraintType pkCols= columnParenthesesList -> ^( tableConstraintType $pkCols) ;
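	// Matches "PRIMARY KEY ( cols )" or "UNIQUE ( cols )" and makes the
	// constraint-type token the root with the column list as its only child,
	// e.g. an illustrative fragment (assumption): UNIQUE (first_name, last_name)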
	public final HiveParser.pkUkConstraint_return pkUkConstraint() throws RecognitionException {
		HiveParser.pkUkConstraint_return retval = new HiveParser.pkUkConstraint_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope pkCols =null;
		ParserRuleReturnScope tableConstraintType846 =null;

		RewriteRuleSubtreeStream stream_columnParenthesesList=new RewriteRuleSubtreeStream(adaptor,"rule columnParenthesesList");
		RewriteRuleSubtreeStream stream_tableConstraintType=new RewriteRuleSubtreeStream(adaptor,"rule tableConstraintType");

		 pushMsg("pk or uk table level constraint", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2251:5: ( tableConstraintType pkCols= columnParenthesesList -> ^( tableConstraintType $pkCols) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2251:7: tableConstraintType pkCols= columnParenthesesList
			{
			pushFollow(FOLLOW_tableConstraintType_in_pkUkConstraint13603);
			tableConstraintType846=tableConstraintType();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableConstraintType.add(tableConstraintType846.getTree());
			pushFollow(FOLLOW_columnParenthesesList_in_pkUkConstraint13607);
			pkCols=columnParenthesesList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnParenthesesList.add(pkCols.getTree());
			// AST REWRITE
			// elements: pkCols, tableConstraintType
			// token labels: 
			// rule labels: pkCols, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_pkCols=new RewriteRuleSubtreeStream(adaptor,"rule pkCols",pkCols!=null?pkCols.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2252:5: -> ^( tableConstraintType $pkCols)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2252:8: ^( tableConstraintType $pkCols)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot(stream_tableConstraintType.nextNode(), root_1);
				adaptor.addChild(root_1, stream_pkCols.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "pkUkConstraint"


	public static class checkConstraint_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "checkConstraint"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2255:1: checkConstraint : KW_CHECK expression -> ^( TOK_CHECK_CONSTRAINT expression ) ;
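	// Wraps an arbitrary boolean expression under TOK_CHECK_CONSTRAINT, e.g. an
	// illustrative fragment (assumption): CHECK (price > 0)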
	public final HiveParser.checkConstraint_return checkConstraint() throws RecognitionException {
		HiveParser.checkConstraint_return retval = new HiveParser.checkConstraint_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_CHECK847=null;
		ParserRuleReturnScope expression848 =null;

		ASTNode KW_CHECK847_tree=null;
		RewriteRuleTokenStream stream_KW_CHECK=new RewriteRuleTokenStream(adaptor,"token KW_CHECK");
		RewriteRuleSubtreeStream stream_expression=new RewriteRuleSubtreeStream(adaptor,"rule expression");

		 pushMsg("CHECK constraint", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2258:5: ( KW_CHECK expression -> ^( TOK_CHECK_CONSTRAINT expression ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2258:7: KW_CHECK expression
			{
			KW_CHECK847=(Token)match(input,KW_CHECK,FOLLOW_KW_CHECK_in_checkConstraint13647); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_CHECK.add(KW_CHECK847);

			pushFollow(FOLLOW_expression_in_checkConstraint13649);
			expression848=expression();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_expression.add(expression848.getTree());
			// AST REWRITE
			// elements: expression
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2259:5: -> ^( TOK_CHECK_CONSTRAINT expression )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2259:8: ^( TOK_CHECK_CONSTRAINT expression )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CHECK_CONSTRAINT, "TOK_CHECK_CONSTRAINT"), root_1);
				adaptor.addChild(root_1, stream_expression.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule (none needed here)
		}
		return retval;
	}
	// $ANTLR end "checkConstraint"


	public static class createForeignKey_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "createForeignKey"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2262:1: createForeignKey : ( KW_CONSTRAINT constraintName= identifier )? KW_FOREIGN KW_KEY fkCols= columnParenthesesList KW_REFERENCES tabName= tableName parCols= columnParenthesesList ( constraintOptsCreate )? -> {$constraintName.tree != null}? ^( TOK_FOREIGN_KEY ^( TOK_CONSTRAINT_NAME $constraintName) $fkCols $tabName $parCols ( constraintOptsCreate )? ) -> ^( TOK_FOREIGN_KEY $fkCols $tabName $parCols ( constraintOptsCreate )? ) ;
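	// Parses a complete foreign-key clause: optional name, the referencing column
	// list, the referenced table with its column list, and optional constraint
	// options. Illustrative HiveQL (an assumption, not taken from the grammar file):
	//   CONSTRAINT fk1 FOREIGN KEY (dept_id) REFERENCES dept (id) DISABLE NOVALIDATE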
	public final HiveParser.createForeignKey_return createForeignKey() throws RecognitionException {
		HiveParser.createForeignKey_return retval = new HiveParser.createForeignKey_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_CONSTRAINT849=null;
		Token KW_FOREIGN850=null;
		Token KW_KEY851=null;
		Token KW_REFERENCES852=null;
		ParserRuleReturnScope constraintName =null;
		ParserRuleReturnScope fkCols =null;
		ParserRuleReturnScope tabName =null;
		ParserRuleReturnScope parCols =null;
		ParserRuleReturnScope constraintOptsCreate853 =null;

		ASTNode KW_CONSTRAINT849_tree=null;
		ASTNode KW_FOREIGN850_tree=null;
		ASTNode KW_KEY851_tree=null;
		ASTNode KW_REFERENCES852_tree=null;
		RewriteRuleTokenStream stream_KW_CONSTRAINT=new RewriteRuleTokenStream(adaptor,"token KW_CONSTRAINT");
		RewriteRuleTokenStream stream_KW_REFERENCES=new RewriteRuleTokenStream(adaptor,"token KW_REFERENCES");
		RewriteRuleTokenStream stream_KW_FOREIGN=new RewriteRuleTokenStream(adaptor,"token KW_FOREIGN");
		RewriteRuleTokenStream stream_KW_KEY=new RewriteRuleTokenStream(adaptor,"token KW_KEY");
		RewriteRuleSubtreeStream stream_constraintOptsCreate=new RewriteRuleSubtreeStream(adaptor,"rule constraintOptsCreate");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_columnParenthesesList=new RewriteRuleSubtreeStream(adaptor,"rule columnParenthesesList");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		 pushMsg("foreign key", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2265:5: ( ( KW_CONSTRAINT constraintName= identifier )? KW_FOREIGN KW_KEY fkCols= columnParenthesesList KW_REFERENCES tabName= tableName parCols= columnParenthesesList ( constraintOptsCreate )? -> {$constraintName.tree != null}? ^( TOK_FOREIGN_KEY ^( TOK_CONSTRAINT_NAME $constraintName) $fkCols $tabName $parCols ( constraintOptsCreate )? ) -> ^( TOK_FOREIGN_KEY $fkCols $tabName $parCols ( constraintOptsCreate )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2265:7: ( KW_CONSTRAINT constraintName= identifier )? KW_FOREIGN KW_KEY fkCols= columnParenthesesList KW_REFERENCES tabName= tableName parCols= columnParenthesesList ( constraintOptsCreate )?
			{
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2265:7: ( KW_CONSTRAINT constraintName= identifier )?
			int alt255=2;
			int LA255_0 = input.LA(1);
			if ( (LA255_0==KW_CONSTRAINT) ) {
				alt255=1;
			}
			switch (alt255) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2265:8: KW_CONSTRAINT constraintName= identifier
					{
					KW_CONSTRAINT849=(Token)match(input,KW_CONSTRAINT,FOLLOW_KW_CONSTRAINT_in_createForeignKey13689); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_CONSTRAINT.add(KW_CONSTRAINT849);

					pushFollow(FOLLOW_identifier_in_createForeignKey13693);
					constraintName=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(constraintName.getTree());
					}
					break;

			}

			KW_FOREIGN850=(Token)match(input,KW_FOREIGN,FOLLOW_KW_FOREIGN_in_createForeignKey13697); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FOREIGN.add(KW_FOREIGN850);

			KW_KEY851=(Token)match(input,KW_KEY,FOLLOW_KW_KEY_in_createForeignKey13699); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_KEY.add(KW_KEY851);

			pushFollow(FOLLOW_columnParenthesesList_in_createForeignKey13703);
			fkCols=columnParenthesesList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnParenthesesList.add(fkCols.getTree());
			KW_REFERENCES852=(Token)match(input,KW_REFERENCES,FOLLOW_KW_REFERENCES_in_createForeignKey13706); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_REFERENCES.add(KW_REFERENCES852);

			pushFollow(FOLLOW_tableName_in_createForeignKey13710);
			tabName=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(tabName.getTree());
			pushFollow(FOLLOW_columnParenthesesList_in_createForeignKey13714);
			parCols=columnParenthesesList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnParenthesesList.add(parCols.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2265:160: ( constraintOptsCreate )?
			int alt256=2;
			int LA256_0 = input.LA(1);
			if ( (LA256_0==KW_DISABLE||LA256_0==KW_ENABLE||LA256_0==KW_ENFORCED||LA256_0==KW_NOT) ) {
				alt256=1;
			}
			switch (alt256) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2265:160: constraintOptsCreate
					{
					pushFollow(FOLLOW_constraintOptsCreate_in_createForeignKey13716);
					constraintOptsCreate853=constraintOptsCreate();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_constraintOptsCreate.add(constraintOptsCreate853.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: fkCols, constraintOptsCreate, parCols, parCols, constraintOptsCreate, fkCols, tabName, tabName, constraintName
			// token labels: 
			// rule labels: parCols, tabName, fkCols, constraintName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_parCols=new RewriteRuleSubtreeStream(adaptor,"rule parCols",parCols!=null?parCols.getTree():null);
			RewriteRuleSubtreeStream stream_tabName=new RewriteRuleSubtreeStream(adaptor,"rule tabName",tabName!=null?tabName.getTree():null);
			RewriteRuleSubtreeStream stream_fkCols=new RewriteRuleSubtreeStream(adaptor,"rule fkCols",fkCols!=null?fkCols.getTree():null);
			RewriteRuleSubtreeStream stream_constraintName=new RewriteRuleSubtreeStream(adaptor,"rule constraintName",constraintName!=null?constraintName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2266:5: -> {$constraintName.tree != null}? ^( TOK_FOREIGN_KEY ^( TOK_CONSTRAINT_NAME $constraintName) $fkCols $tabName $parCols ( constraintOptsCreate )? )
			if ((constraintName!=null?((ASTNode)constraintName.getTree()):null) != null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2267:13: ^( TOK_FOREIGN_KEY ^( TOK_CONSTRAINT_NAME $constraintName) $fkCols $tabName $parCols ( constraintOptsCreate )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FOREIGN_KEY, "TOK_FOREIGN_KEY"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2267:31: ^( TOK_CONSTRAINT_NAME $constraintName)
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CONSTRAINT_NAME, "TOK_CONSTRAINT_NAME"), root_2);
				adaptor.addChild(root_2, stream_constraintName.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_1, stream_fkCols.nextTree());
				adaptor.addChild(root_1, stream_tabName.nextTree());
				adaptor.addChild(root_1, stream_parCols.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2267:96: ( constraintOptsCreate )?
				if ( stream_constraintOptsCreate.hasNext() ) {
					adaptor.addChild(root_1, stream_constraintOptsCreate.nextTree());
				}
				stream_constraintOptsCreate.reset();

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2268:5: -> ^( TOK_FOREIGN_KEY $fkCols $tabName $parCols ( constraintOptsCreate )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2268:8: ^( TOK_FOREIGN_KEY $fkCols $tabName $parCols ( constraintOptsCreate )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FOREIGN_KEY, "TOK_FOREIGN_KEY"), root_1);
				adaptor.addChild(root_1, stream_fkCols.nextTree());
				adaptor.addChild(root_1, stream_tabName.nextTree());
				adaptor.addChild(root_1, stream_parCols.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2268:52: ( constraintOptsCreate )?
				if ( stream_constraintOptsCreate.hasNext() ) {
					adaptor.addChild(root_1, stream_constraintOptsCreate.nextTree());
				}
				stream_constraintOptsCreate.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "createForeignKey"


	public static class alterForeignKeyWithName_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterForeignKeyWithName"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2271:1: alterForeignKeyWithName : KW_CONSTRAINT constraintName= identifier KW_FOREIGN KW_KEY fkCols= columnParenthesesList KW_REFERENCES tabName= tableName parCols= columnParenthesesList ( constraintOptsAlter )? -> ^( TOK_FOREIGN_KEY ^( TOK_CONSTRAINT_NAME $constraintName) $fkCols $tabName $parCols ( constraintOptsAlter )? ) ;
	public final HiveParser.alterForeignKeyWithName_return alterForeignKeyWithName() throws RecognitionException {
		HiveParser.alterForeignKeyWithName_return retval = new HiveParser.alterForeignKeyWithName_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_CONSTRAINT854=null;
		Token KW_FOREIGN855=null;
		Token KW_KEY856=null;
		Token KW_REFERENCES857=null;
		ParserRuleReturnScope constraintName =null;
		ParserRuleReturnScope fkCols =null;
		ParserRuleReturnScope tabName =null;
		ParserRuleReturnScope parCols =null;
		ParserRuleReturnScope constraintOptsAlter858 =null;

		ASTNode KW_CONSTRAINT854_tree=null;
		ASTNode KW_FOREIGN855_tree=null;
		ASTNode KW_KEY856_tree=null;
		ASTNode KW_REFERENCES857_tree=null;
		RewriteRuleTokenStream stream_KW_CONSTRAINT=new RewriteRuleTokenStream(adaptor,"token KW_CONSTRAINT");
		RewriteRuleTokenStream stream_KW_REFERENCES=new RewriteRuleTokenStream(adaptor,"token KW_REFERENCES");
		RewriteRuleTokenStream stream_KW_FOREIGN=new RewriteRuleTokenStream(adaptor,"token KW_FOREIGN");
		RewriteRuleTokenStream stream_KW_KEY=new RewriteRuleTokenStream(adaptor,"token KW_KEY");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_constraintOptsAlter=new RewriteRuleSubtreeStream(adaptor,"rule constraintOptsAlter");
		RewriteRuleSubtreeStream stream_columnParenthesesList=new RewriteRuleSubtreeStream(adaptor,"rule columnParenthesesList");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		 pushMsg("foreign key with key name", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2274:5: ( KW_CONSTRAINT constraintName= identifier KW_FOREIGN KW_KEY fkCols= columnParenthesesList KW_REFERENCES tabName= tableName parCols= columnParenthesesList ( constraintOptsAlter )? -> ^( TOK_FOREIGN_KEY ^( TOK_CONSTRAINT_NAME $constraintName) $fkCols $tabName $parCols ( constraintOptsAlter )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2274:7: KW_CONSTRAINT constraintName= identifier KW_FOREIGN KW_KEY fkCols= columnParenthesesList KW_REFERENCES tabName= tableName parCols= columnParenthesesList ( constraintOptsAlter )?
			{
			KW_CONSTRAINT854=(Token)match(input,KW_CONSTRAINT,FOLLOW_KW_CONSTRAINT_in_alterForeignKeyWithName13809); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_CONSTRAINT.add(KW_CONSTRAINT854);

			pushFollow(FOLLOW_identifier_in_alterForeignKeyWithName13813);
			constraintName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(constraintName.getTree());
			KW_FOREIGN855=(Token)match(input,KW_FOREIGN,FOLLOW_KW_FOREIGN_in_alterForeignKeyWithName13815); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FOREIGN.add(KW_FOREIGN855);

			KW_KEY856=(Token)match(input,KW_KEY,FOLLOW_KW_KEY_in_alterForeignKeyWithName13817); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_KEY.add(KW_KEY856);

			pushFollow(FOLLOW_columnParenthesesList_in_alterForeignKeyWithName13821);
			fkCols=columnParenthesesList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnParenthesesList.add(fkCols.getTree());
			KW_REFERENCES857=(Token)match(input,KW_REFERENCES,FOLLOW_KW_REFERENCES_in_alterForeignKeyWithName13824); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_REFERENCES.add(KW_REFERENCES857);

			pushFollow(FOLLOW_tableName_in_alterForeignKeyWithName13828);
			tabName=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(tabName.getTree());
			pushFollow(FOLLOW_columnParenthesesList_in_alterForeignKeyWithName13832);
			parCols=columnParenthesesList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnParenthesesList.add(parCols.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2274:157: ( constraintOptsAlter )?
			int alt257=2;
			int LA257_0 = input.LA(1);
			if ( (LA257_0==KW_DISABLE||LA257_0==KW_ENABLE||LA257_0==KW_ENFORCED||LA257_0==KW_NOT) ) {
				alt257=1;
			}
			switch (alt257) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2274:157: constraintOptsAlter
					{
					pushFollow(FOLLOW_constraintOptsAlter_in_alterForeignKeyWithName13834);
					constraintOptsAlter858=constraintOptsAlter();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_constraintOptsAlter.add(constraintOptsAlter858.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: constraintName, fkCols, parCols, constraintOptsAlter, tabName
			// token labels: 
			// rule labels: parCols, tabName, fkCols, constraintName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_parCols=new RewriteRuleSubtreeStream(adaptor,"rule parCols",parCols!=null?parCols.getTree():null);
			RewriteRuleSubtreeStream stream_tabName=new RewriteRuleSubtreeStream(adaptor,"rule tabName",tabName!=null?tabName.getTree():null);
			RewriteRuleSubtreeStream stream_fkCols=new RewriteRuleSubtreeStream(adaptor,"rule fkCols",fkCols!=null?fkCols.getTree():null);
			RewriteRuleSubtreeStream stream_constraintName=new RewriteRuleSubtreeStream(adaptor,"rule constraintName",constraintName!=null?constraintName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2275:5: -> ^( TOK_FOREIGN_KEY ^( TOK_CONSTRAINT_NAME $constraintName) $fkCols $tabName $parCols ( constraintOptsAlter )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2275:8: ^( TOK_FOREIGN_KEY ^( TOK_CONSTRAINT_NAME $constraintName) $fkCols $tabName $parCols ( constraintOptsAlter )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FOREIGN_KEY, "TOK_FOREIGN_KEY"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2275:26: ^( TOK_CONSTRAINT_NAME $constraintName)
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CONSTRAINT_NAME, "TOK_CONSTRAINT_NAME"), root_2);
				adaptor.addChild(root_2, stream_constraintName.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_1, stream_fkCols.nextTree());
				adaptor.addChild(root_1, stream_tabName.nextTree());
				adaptor.addChild(root_1, stream_parCols.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2275:91: ( constraintOptsAlter )?
				if ( stream_constraintOptsAlter.hasNext() ) {
					adaptor.addChild(root_1, stream_constraintOptsAlter.nextTree());
				}
				stream_constraintOptsAlter.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "alterForeignKeyWithName"


	public static class skewedValueElement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "skewedValueElement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2278:1: skewedValueElement : ( skewedColumnValues | skewedColumnValuePairList );
	public final HiveParser.skewedValueElement_return skewedValueElement() throws RecognitionException {
		HiveParser.skewedValueElement_return retval = new HiveParser.skewedValueElement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope skewedColumnValues859 =null;
		ParserRuleReturnScope skewedColumnValuePairList860 =null;


		 pushMsg("skewed value element", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2281:5: ( skewedColumnValues | skewedColumnValuePairList )
			int alt258=2;
			int LA258_0 = input.LA(1);
			if ( (LA258_0==CharSetName||LA258_0==IntegralLiteral||(LA258_0 >= KW_CURRENT_DATE && LA258_0 <= KW_CURRENT_TIMESTAMP)||LA258_0==KW_DATE||LA258_0==KW_FALSE||LA258_0==KW_NULL||(LA258_0 >= KW_TIMESTAMP && LA258_0 <= KW_TIMESTAMPLOCALTZ)||LA258_0==KW_TRUE||(LA258_0 >= Number && LA258_0 <= NumberLiteral)||LA258_0==StringLiteral) ) {
				alt258=1;
			}
			else if ( (LA258_0==LPAREN) ) {
				alt258=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 258, 0, input);
				throw nvae;
			}

			switch (alt258) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2282:7: skewedColumnValues
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_skewedColumnValues_in_skewedValueElement13898);
					skewedColumnValues859=skewedColumnValues();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, skewedColumnValues859.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2283:8: skewedColumnValuePairList
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_skewedColumnValuePairList_in_skewedValueElement13907);
					skewedColumnValuePairList860=skewedColumnValuePairList();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, skewedColumnValuePairList860.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "skewedValueElement"


	public static class skewedColumnValuePairList_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "skewedColumnValuePairList"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2286:1: skewedColumnValuePairList : skewedColumnValuePair ( COMMA skewedColumnValuePair )* -> ^( TOK_TABCOLVALUE_PAIR ( skewedColumnValuePair )+ ) ;
	public final HiveParser.skewedColumnValuePairList_return skewedColumnValuePairList() throws RecognitionException {
		HiveParser.skewedColumnValuePairList_return retval = new HiveParser.skewedColumnValuePairList_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token COMMA862=null;
		ParserRuleReturnScope skewedColumnValuePair861 =null;
		ParserRuleReturnScope skewedColumnValuePair863 =null;

		ASTNode COMMA862_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleSubtreeStream stream_skewedColumnValuePair=new RewriteRuleSubtreeStream(adaptor,"rule skewedColumnValuePair");

		 pushMsg("column value pair list", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2289:5: ( skewedColumnValuePair ( COMMA skewedColumnValuePair )* -> ^( TOK_TABCOLVALUE_PAIR ( skewedColumnValuePair )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2289:7: skewedColumnValuePair ( COMMA skewedColumnValuePair )*
			{
			pushFollow(FOLLOW_skewedColumnValuePair_in_skewedColumnValuePairList13934);
			skewedColumnValuePair861=skewedColumnValuePair();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_skewedColumnValuePair.add(skewedColumnValuePair861.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2289:29: ( COMMA skewedColumnValuePair )*
			loop259:
			while (true) {
				int alt259=2;
				int LA259_0 = input.LA(1);
				if ( (LA259_0==COMMA) ) {
					alt259=1;
				}

				switch (alt259) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2289:30: COMMA skewedColumnValuePair
					{
					COMMA862=(Token)match(input,COMMA,FOLLOW_COMMA_in_skewedColumnValuePairList13937); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA862);

					pushFollow(FOLLOW_skewedColumnValuePair_in_skewedColumnValuePairList13939);
					skewedColumnValuePair863=skewedColumnValuePair();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_skewedColumnValuePair.add(skewedColumnValuePair863.getTree());
					}
					break;

				default :
					break loop259;
				}
			}

			// AST REWRITE
			// elements: skewedColumnValuePair
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2289:60: -> ^( TOK_TABCOLVALUE_PAIR ( skewedColumnValuePair )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2289:63: ^( TOK_TABCOLVALUE_PAIR ( skewedColumnValuePair )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLVALUE_PAIR, "TOK_TABCOLVALUE_PAIR"), root_1);
				if ( !(stream_skewedColumnValuePair.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_skewedColumnValuePair.hasNext() ) {
					adaptor.addChild(root_1, stream_skewedColumnValuePair.nextTree());
				}
				stream_skewedColumnValuePair.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "skewedColumnValuePairList"


	public static class skewedColumnValuePair_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "skewedColumnValuePair"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2292:1: skewedColumnValuePair : LPAREN colValues= skewedColumnValues RPAREN -> ^( TOK_TABCOLVALUES $colValues) ;
	public final HiveParser.skewedColumnValuePair_return skewedColumnValuePair() throws RecognitionException {
		HiveParser.skewedColumnValuePair_return retval = new HiveParser.skewedColumnValuePair_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token LPAREN864=null;
		Token RPAREN865=null;
		ParserRuleReturnScope colValues =null;

		ASTNode LPAREN864_tree=null;
		ASTNode RPAREN865_tree=null;
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleSubtreeStream stream_skewedColumnValues=new RewriteRuleSubtreeStream(adaptor,"rule skewedColumnValues");

		 pushMsg("column value pair", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2295:5: ( LPAREN colValues= skewedColumnValues RPAREN -> ^( TOK_TABCOLVALUES $colValues) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2296:7: LPAREN colValues= skewedColumnValues RPAREN
			{
			LPAREN864=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_skewedColumnValuePair13984); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN864);

			pushFollow(FOLLOW_skewedColumnValues_in_skewedColumnValuePair13988);
			colValues=skewedColumnValues();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_skewedColumnValues.add(colValues.getTree());
			RPAREN865=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_skewedColumnValuePair13990); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN865);

			// AST REWRITE
			// elements: colValues
			// token labels: 
			// rule labels: colValues, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_colValues=new RewriteRuleSubtreeStream(adaptor,"rule colValues",colValues!=null?colValues.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2297:7: -> ^( TOK_TABCOLVALUES $colValues)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2297:10: ^( TOK_TABCOLVALUES $colValues)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLVALUES, "TOK_TABCOLVALUES"), root_1);
				adaptor.addChild(root_1, stream_colValues.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "skewedColumnValuePair"


	public static class skewedColumnValues_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "skewedColumnValues"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2300:1: skewedColumnValues : skewedColumnValue ( COMMA skewedColumnValue )* -> ^( TOK_TABCOLVALUE ( skewedColumnValue )+ ) ;
	public final HiveParser.skewedColumnValues_return skewedColumnValues() throws RecognitionException {
		HiveParser.skewedColumnValues_return retval = new HiveParser.skewedColumnValues_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token COMMA867=null;
		ParserRuleReturnScope skewedColumnValue866 =null;
		ParserRuleReturnScope skewedColumnValue868 =null;

		ASTNode COMMA867_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleSubtreeStream stream_skewedColumnValue=new RewriteRuleSubtreeStream(adaptor,"rule skewedColumnValue");

		 pushMsg("column values", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2303:5: ( skewedColumnValue ( COMMA skewedColumnValue )* -> ^( TOK_TABCOLVALUE ( skewedColumnValue )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2303:7: skewedColumnValue ( COMMA skewedColumnValue )*
			{
			pushFollow(FOLLOW_skewedColumnValue_in_skewedColumnValues14033);
			skewedColumnValue866=skewedColumnValue();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_skewedColumnValue.add(skewedColumnValue866.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2303:25: ( COMMA skewedColumnValue )*
			loop260:
			while (true) {
				int alt260=2;
				int LA260_0 = input.LA(1);
				if ( (LA260_0==COMMA) ) {
					alt260=1;
				}

				switch (alt260) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2303:26: COMMA skewedColumnValue
					{
					COMMA867=(Token)match(input,COMMA,FOLLOW_COMMA_in_skewedColumnValues14036); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA867);

					pushFollow(FOLLOW_skewedColumnValue_in_skewedColumnValues14038);
					skewedColumnValue868=skewedColumnValue();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_skewedColumnValue.add(skewedColumnValue868.getTree());
					}
					break;

				default :
					break loop260;
				}
			}

			// AST REWRITE
			// elements: skewedColumnValue
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2303:52: -> ^( TOK_TABCOLVALUE ( skewedColumnValue )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2303:55: ^( TOK_TABCOLVALUE ( skewedColumnValue )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLVALUE, "TOK_TABCOLVALUE"), root_1);
				if ( !(stream_skewedColumnValue.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_skewedColumnValue.hasNext() ) {
					adaptor.addChild(root_1, stream_skewedColumnValue.nextTree());
				}
				stream_skewedColumnValue.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "skewedColumnValues"


	public static class skewedColumnValue_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "skewedColumnValue"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2306:1: skewedColumnValue : constant ;
	public final HiveParser.skewedColumnValue_return skewedColumnValue() throws RecognitionException {
		HiveParser.skewedColumnValue_return retval = new HiveParser.skewedColumnValue_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope constant869 =null;


		 pushMsg("column value", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2309:5: ( constant )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2310:7: constant
			{
			root_0 = (ASTNode)adaptor.nil();


			pushFollow(FOLLOW_constant_in_skewedColumnValue14082);
			constant869=constant();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) adaptor.addChild(root_0, constant869.getTree());

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "skewedColumnValue"


	public static class skewedValueLocationElement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "skewedValueLocationElement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2313:1: skewedValueLocationElement : ( skewedColumnValue | skewedColumnValuePair );
	public final HiveParser.skewedValueLocationElement_return skewedValueLocationElement() throws RecognitionException {
		HiveParser.skewedValueLocationElement_return retval = new HiveParser.skewedValueLocationElement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope skewedColumnValue870 =null;
		ParserRuleReturnScope skewedColumnValuePair871 =null;


		 pushMsg("skewed value location element", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2316:5: ( skewedColumnValue | skewedColumnValuePair )
			int alt261=2;
			int LA261_0 = input.LA(1);
			if ( (LA261_0==CharSetName||LA261_0==IntegralLiteral||(LA261_0 >= KW_CURRENT_DATE && LA261_0 <= KW_CURRENT_TIMESTAMP)||LA261_0==KW_DATE||LA261_0==KW_FALSE||LA261_0==KW_NULL||(LA261_0 >= KW_TIMESTAMP && LA261_0 <= KW_TIMESTAMPLOCALTZ)||LA261_0==KW_TRUE||(LA261_0 >= Number && LA261_0 <= NumberLiteral)||LA261_0==StringLiteral) ) {
				alt261=1;
			}
			else if ( (LA261_0==LPAREN) ) {
				alt261=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 261, 0, input);
				throw nvae;
			}

			switch (alt261) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2317:7: skewedColumnValue
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_skewedColumnValue_in_skewedValueLocationElement14116);
					skewedColumnValue870=skewedColumnValue();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, skewedColumnValue870.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2318:8: skewedColumnValuePair
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_skewedColumnValuePair_in_skewedValueLocationElement14125);
					skewedColumnValuePair871=skewedColumnValuePair();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, skewedColumnValuePair871.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "skewedValueLocationElement"


	public static class orderSpecification_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "orderSpecification"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2321:1: orderSpecification : ( KW_ASC | KW_DESC );
	public final HiveParser.orderSpecification_return orderSpecification() throws RecognitionException {
		HiveParser.orderSpecification_return retval = new HiveParser.orderSpecification_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token set872=null;

		ASTNode set872_tree=null;

		 pushMsg("order specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2324:5: ( KW_ASC | KW_DESC )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:
			{
			root_0 = (ASTNode)adaptor.nil();


			set872=input.LT(1);
			if ( input.LA(1)==KW_ASC||input.LA(1)==KW_DESC ) {
				input.consume();
				if ( state.backtracking==0 ) adaptor.addChild(root_0, (ASTNode)adaptor.create(set872));
				state.errorRecovery=false;
				state.failed=false;
			}
			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				MismatchedSetException mse = new MismatchedSetException(null,input);
				throw mse;
			}
			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "orderSpecification"


	public static class nullOrdering_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "nullOrdering"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2326:1: nullOrdering : ( KW_NULLS KW_FIRST -> ^( TOK_NULLS_FIRST ) | KW_NULLS KW_LAST -> ^( TOK_NULLS_LAST ) );
	public final HiveParser.nullOrdering_return nullOrdering() throws RecognitionException {
		HiveParser.nullOrdering_return retval = new HiveParser.nullOrdering_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_NULLS873=null;
		Token KW_FIRST874=null;
		Token KW_NULLS875=null;
		Token KW_LAST876=null;

		ASTNode KW_NULLS873_tree=null;
		ASTNode KW_FIRST874_tree=null;
		ASTNode KW_NULLS875_tree=null;
		ASTNode KW_LAST876_tree=null;
		RewriteRuleTokenStream stream_KW_FIRST=new RewriteRuleTokenStream(adaptor,"token KW_FIRST");
		RewriteRuleTokenStream stream_KW_NULLS=new RewriteRuleTokenStream(adaptor,"token KW_NULLS");
		RewriteRuleTokenStream stream_KW_LAST=new RewriteRuleTokenStream(adaptor,"token KW_LAST");

		 pushMsg("nulls ordering", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2329:5: ( KW_NULLS KW_FIRST -> ^( TOK_NULLS_FIRST ) | KW_NULLS KW_LAST -> ^( TOK_NULLS_LAST ) )
			int alt262=2;
			int LA262_0 = input.LA(1);
			if ( (LA262_0==KW_NULLS) ) {
				int LA262_1 = input.LA(2);
				if ( (LA262_1==KW_FIRST) ) {
					alt262=1;
				}
				else if ( (LA262_1==KW_LAST) ) {
					alt262=2;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 262, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 262, 0, input);
				throw nvae;
			}

			switch (alt262) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2329:7: KW_NULLS KW_FIRST
					{
					KW_NULLS873=(Token)match(input,KW_NULLS,FOLLOW_KW_NULLS_in_nullOrdering14179); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_NULLS.add(KW_NULLS873);

					KW_FIRST874=(Token)match(input,KW_FIRST,FOLLOW_KW_FIRST_in_nullOrdering14181); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_FIRST.add(KW_FIRST874);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2329:25: -> ^( TOK_NULLS_FIRST )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2329:28: ^( TOK_NULLS_FIRST )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_NULLS_FIRST, "TOK_NULLS_FIRST"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2330:7: KW_NULLS KW_LAST
					{
					KW_NULLS875=(Token)match(input,KW_NULLS,FOLLOW_KW_NULLS_in_nullOrdering14195); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_NULLS.add(KW_NULLS875);

					KW_LAST876=(Token)match(input,KW_LAST,FOLLOW_KW_LAST_in_nullOrdering14197); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_LAST.add(KW_LAST876);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2330:24: -> ^( TOK_NULLS_LAST )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2330:27: ^( TOK_NULLS_LAST )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_NULLS_LAST, "TOK_NULLS_LAST"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "nullOrdering"


	public static class columnNameOrder_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnNameOrder"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2333:1: columnNameOrder : identifier (orderSpec= orderSpecification )? (nullSpec= nullOrdering )? -> {$orderSpec.tree == null && $nullSpec.tree == null}? ^( TOK_TABSORTCOLNAMEASC ^( TOK_NULLS_FIRST identifier ) ) -> {$orderSpec.tree == null}? ^( TOK_TABSORTCOLNAMEASC ^( $nullSpec identifier ) ) -> {$nullSpec.tree == null && $orderSpec.tree.getType()==HiveParser.KW_ASC}? ^( TOK_TABSORTCOLNAMEASC ^( TOK_NULLS_FIRST identifier ) ) -> {$nullSpec.tree == null && $orderSpec.tree.getType()==HiveParser.KW_DESC}? ^( TOK_TABSORTCOLNAMEDESC ^( TOK_NULLS_LAST identifier ) ) -> {$orderSpec.tree.getType()==HiveParser.KW_ASC}? ^( TOK_TABSORTCOLNAMEASC ^( $nullSpec identifier ) ) -> ^( TOK_TABSORTCOLNAMEDESC ^( $nullSpec identifier ) ) ;
	public final HiveParser.columnNameOrder_return columnNameOrder() throws RecognitionException {
		HiveParser.columnNameOrder_return retval = new HiveParser.columnNameOrder_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope orderSpec =null;
		ParserRuleReturnScope nullSpec =null;
		ParserRuleReturnScope identifier877 =null;

		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_nullOrdering=new RewriteRuleSubtreeStream(adaptor,"rule nullOrdering");
		RewriteRuleSubtreeStream stream_orderSpecification=new RewriteRuleSubtreeStream(adaptor,"rule orderSpecification");

		 pushMsg("column name order", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2336:5: ( identifier (orderSpec= orderSpecification )? (nullSpec= nullOrdering )? -> {$orderSpec.tree == null && $nullSpec.tree == null}? ^( TOK_TABSORTCOLNAMEASC ^( TOK_NULLS_FIRST identifier ) ) -> {$orderSpec.tree == null}? ^( TOK_TABSORTCOLNAMEASC ^( $nullSpec identifier ) ) -> {$nullSpec.tree == null && $orderSpec.tree.getType()==HiveParser.KW_ASC}? ^( TOK_TABSORTCOLNAMEASC ^( TOK_NULLS_FIRST identifier ) ) -> {$nullSpec.tree == null && $orderSpec.tree.getType()==HiveParser.KW_DESC}? ^( TOK_TABSORTCOLNAMEDESC ^( TOK_NULLS_LAST identifier ) ) -> {$orderSpec.tree.getType()==HiveParser.KW_ASC}? ^( TOK_TABSORTCOLNAMEASC ^( $nullSpec identifier ) ) -> ^( TOK_TABSORTCOLNAMEDESC ^( $nullSpec identifier ) ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2336:7: identifier (orderSpec= orderSpecification )? (nullSpec= nullOrdering )?
			{
			pushFollow(FOLLOW_identifier_in_columnNameOrder14230);
			identifier877=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(identifier877.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2336:27: (orderSpec= orderSpecification )?
			int alt263=2;
			int LA263_0 = input.LA(1);
			if ( (LA263_0==KW_ASC||LA263_0==KW_DESC) ) {
				alt263=1;
			}
			switch (alt263) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2336:27: orderSpec= orderSpecification
					{
					pushFollow(FOLLOW_orderSpecification_in_columnNameOrder14234);
					orderSpec=orderSpecification();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_orderSpecification.add(orderSpec.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2336:56: (nullSpec= nullOrdering )?
			int alt264=2;
			int LA264_0 = input.LA(1);
			if ( (LA264_0==KW_NULLS) ) {
				alt264=1;
			}
			switch (alt264) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2336:56: nullSpec= nullOrdering
					{
					pushFollow(FOLLOW_nullOrdering_in_columnNameOrder14239);
					nullSpec=nullOrdering();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_nullOrdering.add(nullSpec.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: identifier, identifier, identifier, nullSpec, identifier, identifier, identifier, nullSpec, nullSpec
			// token labels: 
			// rule labels: nullSpec, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_nullSpec=new RewriteRuleSubtreeStream(adaptor,"rule nullSpec",nullSpec!=null?nullSpec.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2337:5: -> {$orderSpec.tree == null && $nullSpec.tree == null}? ^( TOK_TABSORTCOLNAMEASC ^( TOK_NULLS_FIRST identifier ) )
			if ((orderSpec!=null?((ASTNode)orderSpec.getTree()):null) == null && (nullSpec!=null?((ASTNode)nullSpec.getTree()):null) == null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2338:13: ^( TOK_TABSORTCOLNAMEASC ^( TOK_NULLS_FIRST identifier ) )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABSORTCOLNAMEASC, "TOK_TABSORTCOLNAMEASC"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2338:37: ^( TOK_NULLS_FIRST identifier )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_NULLS_FIRST, "TOK_NULLS_FIRST"), root_2);
				adaptor.addChild(root_2, stream_identifier.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2339:5: -> {$orderSpec.tree == null}? ^( TOK_TABSORTCOLNAMEASC ^( $nullSpec identifier ) )
			if ((orderSpec!=null?((ASTNode)orderSpec.getTree()):null) == null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2340:13: ^( TOK_TABSORTCOLNAMEASC ^( $nullSpec identifier ) )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABSORTCOLNAMEASC, "TOK_TABSORTCOLNAMEASC"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2340:37: ^( $nullSpec identifier )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot(stream_nullSpec.nextNode(), root_2);
				adaptor.addChild(root_2, stream_identifier.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2341:5: -> {$nullSpec.tree == null && $orderSpec.tree.getType()==HiveParser.KW_ASC}? ^( TOK_TABSORTCOLNAMEASC ^( TOK_NULLS_FIRST identifier ) )
			if ((nullSpec!=null?((ASTNode)nullSpec.getTree()):null) == null && (orderSpec!=null?((ASTNode)orderSpec.getTree()):null).getType()==HiveParser.KW_ASC) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2342:13: ^( TOK_TABSORTCOLNAMEASC ^( TOK_NULLS_FIRST identifier ) )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABSORTCOLNAMEASC, "TOK_TABSORTCOLNAMEASC"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2342:37: ^( TOK_NULLS_FIRST identifier )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_NULLS_FIRST, "TOK_NULLS_FIRST"), root_2);
				adaptor.addChild(root_2, stream_identifier.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2343:5: -> {$nullSpec.tree == null && $orderSpec.tree.getType()==HiveParser.KW_DESC}? ^( TOK_TABSORTCOLNAMEDESC ^( TOK_NULLS_LAST identifier ) )
			if ((nullSpec!=null?((ASTNode)nullSpec.getTree()):null) == null && (orderSpec!=null?((ASTNode)orderSpec.getTree()):null).getType()==HiveParser.KW_DESC) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2344:13: ^( TOK_TABSORTCOLNAMEDESC ^( TOK_NULLS_LAST identifier ) )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABSORTCOLNAMEDESC, "TOK_TABSORTCOLNAMEDESC"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2344:38: ^( TOK_NULLS_LAST identifier )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_NULLS_LAST, "TOK_NULLS_LAST"), root_2);
				adaptor.addChild(root_2, stream_identifier.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2345:5: -> {$orderSpec.tree.getType()==HiveParser.KW_ASC}? ^( TOK_TABSORTCOLNAMEASC ^( $nullSpec identifier ) )
			if ((orderSpec!=null?((ASTNode)orderSpec.getTree()):null).getType()==HiveParser.KW_ASC) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2346:13: ^( TOK_TABSORTCOLNAMEASC ^( $nullSpec identifier ) )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABSORTCOLNAMEASC, "TOK_TABSORTCOLNAMEASC"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2346:37: ^( $nullSpec identifier )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot(stream_nullSpec.nextNode(), root_2);
				adaptor.addChild(root_2, stream_identifier.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2347:5: -> ^( TOK_TABSORTCOLNAMEDESC ^( $nullSpec identifier ) )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2347:8: ^( TOK_TABSORTCOLNAMEDESC ^( $nullSpec identifier ) )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABSORTCOLNAMEDESC, "TOK_TABSORTCOLNAMEDESC"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2347:33: ^( $nullSpec identifier )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot(stream_nullSpec.nextNode(), root_2);
				adaptor.addChild(root_2, stream_identifier.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "columnNameOrder"


	public static class columnNameCommentList_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnNameCommentList"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2350:1: columnNameCommentList : columnNameComment ( COMMA columnNameComment )* -> ^( TOK_TABCOLNAME ( columnNameComment )+ ) ;
	public final HiveParser.columnNameCommentList_return columnNameCommentList() throws RecognitionException {
		HiveParser.columnNameCommentList_return retval = new HiveParser.columnNameCommentList_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token COMMA879=null;
		ParserRuleReturnScope columnNameComment878 =null;
		ParserRuleReturnScope columnNameComment880 =null;

		ASTNode COMMA879_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleSubtreeStream stream_columnNameComment=new RewriteRuleSubtreeStream(adaptor,"rule columnNameComment");

		 pushMsg("column name comment list", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2353:5: ( columnNameComment ( COMMA columnNameComment )* -> ^( TOK_TABCOLNAME ( columnNameComment )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2353:7: columnNameComment ( COMMA columnNameComment )*
			{
			pushFollow(FOLLOW_columnNameComment_in_columnNameCommentList14436);
			columnNameComment878=columnNameComment();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnNameComment.add(columnNameComment878.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2353:25: ( COMMA columnNameComment )*
			loop265:
			while (true) {
				int alt265=2;
				int LA265_0 = input.LA(1);
				if ( (LA265_0==COMMA) ) {
					alt265=1;
				}

				switch (alt265) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2353:26: COMMA columnNameComment
					{
					COMMA879=(Token)match(input,COMMA,FOLLOW_COMMA_in_columnNameCommentList14439); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA879);

					pushFollow(FOLLOW_columnNameComment_in_columnNameCommentList14441);
					columnNameComment880=columnNameComment();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_columnNameComment.add(columnNameComment880.getTree());
					}
					break;

				default :
					break loop265;
				}
			}

			// AST REWRITE
			// elements: columnNameComment
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2353:52: -> ^( TOK_TABCOLNAME ( columnNameComment )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2353:55: ^( TOK_TABCOLNAME ( columnNameComment )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLNAME, "TOK_TABCOLNAME"), root_1);
				if ( !(stream_columnNameComment.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_columnNameComment.hasNext() ) {
					adaptor.addChild(root_1, stream_columnNameComment.nextTree());
				}
				stream_columnNameComment.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "columnNameCommentList"


	public static class columnNameComment_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnNameComment"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2356:1: columnNameComment : colName= identifier ( KW_COMMENT comment= StringLiteral )? -> ^( TOK_TABCOL $colName TOK_NULL ( $comment)? ) ;
	public final HiveParser.columnNameComment_return columnNameComment() throws RecognitionException {
		HiveParser.columnNameComment_return retval = new HiveParser.columnNameComment_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token comment=null;
		Token KW_COMMENT881=null;
		ParserRuleReturnScope colName =null;

		ASTNode comment_tree=null;
		ASTNode KW_COMMENT881_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");

		 pushMsg("column name comment", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2359:5: (colName= identifier ( KW_COMMENT comment= StringLiteral )? -> ^( TOK_TABCOL $colName TOK_NULL ( $comment)? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2359:7: colName= identifier ( KW_COMMENT comment= StringLiteral )?
			{
			pushFollow(FOLLOW_identifier_in_columnNameComment14481);
			colName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(colName.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2359:26: ( KW_COMMENT comment= StringLiteral )?
			int alt266=2;
			int LA266_0 = input.LA(1);
			if ( (LA266_0==KW_COMMENT) ) {
				alt266=1;
			}
			switch (alt266) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2359:27: KW_COMMENT comment= StringLiteral
					{
					KW_COMMENT881=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_columnNameComment14484); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_COMMENT.add(KW_COMMENT881);

					comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_columnNameComment14488); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(comment);

					}
					break;

			}

			// AST REWRITE
			// elements: colName, comment
			// token labels: comment
			// rule labels: colName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
			RewriteRuleSubtreeStream stream_colName=new RewriteRuleSubtreeStream(adaptor,"rule colName",colName!=null?colName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2360:5: -> ^( TOK_TABCOL $colName TOK_NULL ( $comment)? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2360:8: ^( TOK_TABCOL $colName TOK_NULL ( $comment)? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOL, "TOK_TABCOL"), root_1);
				adaptor.addChild(root_1, stream_colName.nextTree());
				adaptor.addChild(root_1, (ASTNode)adaptor.create(TOK_NULL, "TOK_NULL"));
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2360:40: ( $comment)?
				if ( stream_comment.hasNext() ) {
					adaptor.addChild(root_1, stream_comment.nextNode());
				}
				stream_comment.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup guaranteed to run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "columnNameComment"


	public static class columnRefOrder_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnRefOrder"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2363:1: columnRefOrder : expression (orderSpec= orderSpecification )? (nullSpec= nullOrdering )? -> {$orderSpec.tree == null && $nullSpec.tree == null}? ^( TOK_TABSORTCOLNAMEASC ^( TOK_NULLS_FIRST expression ) ) -> {$orderSpec.tree == null}? ^( TOK_TABSORTCOLNAMEASC ^( $nullSpec expression ) ) -> {$nullSpec.tree == null && $orderSpec.tree.getType()==HiveParser.KW_ASC}? ^( TOK_TABSORTCOLNAMEASC ^( TOK_NULLS_FIRST expression ) ) -> {$nullSpec.tree == null && $orderSpec.tree.getType()==HiveParser.KW_DESC}? ^( TOK_TABSORTCOLNAMEDESC ^( TOK_NULLS_LAST expression ) ) -> {$orderSpec.tree.getType()==HiveParser.KW_ASC}? ^( TOK_TABSORTCOLNAMEASC ^( $nullSpec expression ) ) -> ^( TOK_TABSORTCOLNAMEDESC ^( $nullSpec expression ) ) ;
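	// Illustrative note (not ANTLR output): columnRefOrder mirrors columnNameOrder
	// above, but sorts a full expression (e.g. ORDER BY upper(name) DESC NULLS FIRST)
	// rather than a bare identifier, with the same ASC/NULLS FIRST and
	// DESC/NULLS LAST defaults.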
	public final HiveParser.columnRefOrder_return columnRefOrder() throws RecognitionException {
		HiveParser.columnRefOrder_return retval = new HiveParser.columnRefOrder_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope orderSpec =null;
		ParserRuleReturnScope nullSpec =null;
		ParserRuleReturnScope expression882 =null;

		RewriteRuleSubtreeStream stream_expression=new RewriteRuleSubtreeStream(adaptor,"rule expression");
		RewriteRuleSubtreeStream stream_nullOrdering=new RewriteRuleSubtreeStream(adaptor,"rule nullOrdering");
		RewriteRuleSubtreeStream stream_orderSpecification=new RewriteRuleSubtreeStream(adaptor,"rule orderSpecification");

		 pushMsg("column order", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2366:5: ( expression (orderSpec= orderSpecification )? (nullSpec= nullOrdering )? -> {$orderSpec.tree == null && $nullSpec.tree == null}? ^( TOK_TABSORTCOLNAMEASC ^( TOK_NULLS_FIRST expression ) ) -> {$orderSpec.tree == null}? ^( TOK_TABSORTCOLNAMEASC ^( $nullSpec expression ) ) -> {$nullSpec.tree == null && $orderSpec.tree.getType()==HiveParser.KW_ASC}? ^( TOK_TABSORTCOLNAMEASC ^( TOK_NULLS_FIRST expression ) ) -> {$nullSpec.tree == null && $orderSpec.tree.getType()==HiveParser.KW_DESC}? ^( TOK_TABSORTCOLNAMEDESC ^( TOK_NULLS_LAST expression ) ) -> {$orderSpec.tree.getType()==HiveParser.KW_ASC}? ^( TOK_TABSORTCOLNAMEASC ^( $nullSpec expression ) ) -> ^( TOK_TABSORTCOLNAMEDESC ^( $nullSpec expression ) ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2366:7: expression (orderSpec= orderSpecification )? (nullSpec= nullOrdering )?
			{
			pushFollow(FOLLOW_expression_in_columnRefOrder14536);
			expression882=expression();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_expression.add(expression882.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2366:27: (orderSpec= orderSpecification )?
			int alt267=2;
			int LA267_0 = input.LA(1);
			if ( (LA267_0==KW_ASC||LA267_0==KW_DESC) ) {
				alt267=1;
			}
			switch (alt267) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2366:27: orderSpec= orderSpecification
					{
					pushFollow(FOLLOW_orderSpecification_in_columnRefOrder14540);
					orderSpec=orderSpecification();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_orderSpecification.add(orderSpec.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2366:56: (nullSpec= nullOrdering )?
			int alt268=2;
			int LA268_0 = input.LA(1);
			if ( (LA268_0==KW_NULLS) ) {
				alt268=1;
			}
			switch (alt268) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2366:56: nullSpec= nullOrdering
					{
					pushFollow(FOLLOW_nullOrdering_in_columnRefOrder14545);
					nullSpec=nullOrdering();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_nullOrdering.add(nullSpec.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: nullSpec, expression, nullSpec, nullSpec, expression, expression, expression, expression, expression
			// token labels: 
			// rule labels: nullSpec, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_nullSpec=new RewriteRuleSubtreeStream(adaptor,"rule nullSpec",nullSpec!=null?nullSpec.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2367:5: -> {$orderSpec.tree == null && $nullSpec.tree == null}? ^( TOK_TABSORTCOLNAMEASC ^( TOK_NULLS_FIRST expression ) )
			if ((orderSpec!=null?((ASTNode)orderSpec.getTree()):null) == null && (nullSpec!=null?((ASTNode)nullSpec.getTree()):null) == null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2368:13: ^( TOK_TABSORTCOLNAMEASC ^( TOK_NULLS_FIRST expression ) )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABSORTCOLNAMEASC, "TOK_TABSORTCOLNAMEASC"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2368:37: ^( TOK_NULLS_FIRST expression )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_NULLS_FIRST, "TOK_NULLS_FIRST"), root_2);
				adaptor.addChild(root_2, stream_expression.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2369:5: -> {$orderSpec.tree == null}? ^( TOK_TABSORTCOLNAMEASC ^( $nullSpec expression ) )
			if ((orderSpec!=null?((ASTNode)orderSpec.getTree()):null) == null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2370:13: ^( TOK_TABSORTCOLNAMEASC ^( $nullSpec expression ) )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABSORTCOLNAMEASC, "TOK_TABSORTCOLNAMEASC"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2370:37: ^( $nullSpec expression )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot(stream_nullSpec.nextNode(), root_2);
				adaptor.addChild(root_2, stream_expression.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2371:5: -> {$nullSpec.tree == null && $orderSpec.tree.getType()==HiveParser.KW_ASC}? ^( TOK_TABSORTCOLNAMEASC ^( TOK_NULLS_FIRST expression ) )
			if ((nullSpec!=null?((ASTNode)nullSpec.getTree()):null) == null && (orderSpec!=null?((ASTNode)orderSpec.getTree()):null).getType()==HiveParser.KW_ASC) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2372:13: ^( TOK_TABSORTCOLNAMEASC ^( TOK_NULLS_FIRST expression ) )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABSORTCOLNAMEASC, "TOK_TABSORTCOLNAMEASC"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2372:37: ^( TOK_NULLS_FIRST expression )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_NULLS_FIRST, "TOK_NULLS_FIRST"), root_2);
				adaptor.addChild(root_2, stream_expression.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2373:5: -> {$nullSpec.tree == null && $orderSpec.tree.getType()==HiveParser.KW_DESC}? ^( TOK_TABSORTCOLNAMEDESC ^( TOK_NULLS_LAST expression ) )
			if ((nullSpec!=null?((ASTNode)nullSpec.getTree()):null) == null && (orderSpec!=null?((ASTNode)orderSpec.getTree()):null).getType()==HiveParser.KW_DESC) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2374:13: ^( TOK_TABSORTCOLNAMEDESC ^( TOK_NULLS_LAST expression ) )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABSORTCOLNAMEDESC, "TOK_TABSORTCOLNAMEDESC"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2374:38: ^( TOK_NULLS_LAST expression )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_NULLS_LAST, "TOK_NULLS_LAST"), root_2);
				adaptor.addChild(root_2, stream_expression.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2375:5: -> {$orderSpec.tree.getType()==HiveParser.KW_ASC}? ^( TOK_TABSORTCOLNAMEASC ^( $nullSpec expression ) )
			if ((orderSpec!=null?((ASTNode)orderSpec.getTree()):null).getType()==HiveParser.KW_ASC) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2376:13: ^( TOK_TABSORTCOLNAMEASC ^( $nullSpec expression ) )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABSORTCOLNAMEASC, "TOK_TABSORTCOLNAMEASC"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2376:37: ^( $nullSpec expression )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot(stream_nullSpec.nextNode(), root_2);
				adaptor.addChild(root_2, stream_expression.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2377:5: -> ^( TOK_TABSORTCOLNAMEDESC ^( $nullSpec expression ) )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2377:8: ^( TOK_TABSORTCOLNAMEDESC ^( $nullSpec expression ) )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABSORTCOLNAMEDESC, "TOK_TABSORTCOLNAMEDESC"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2377:33: ^( $nullSpec expression )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot(stream_nullSpec.nextNode(), root_2);
				adaptor.addChild(root_2, stream_expression.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "columnRefOrder"


	public static class columnNameType_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnNameType"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2380:1: columnNameType : colName= identifier colType ( KW_COMMENT comment= StringLiteral )? -> {containExcludedCharForCreateTableColumnName($colName.text)}? -> {$comment == null}? ^( TOK_TABCOL $colName colType ) -> ^( TOK_TABCOL $colName colType $comment) ;
	public final HiveParser.columnNameType_return columnNameType() throws RecognitionException {
		HiveParser.columnNameType_return retval = new HiveParser.columnNameType_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token comment=null;
		Token KW_COMMENT884=null;
		ParserRuleReturnScope colName =null;
		ParserRuleReturnScope colType883 =null;

		ASTNode comment_tree=null;
		ASTNode KW_COMMENT884_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_colType=new RewriteRuleSubtreeStream(adaptor,"rule colType");

		 pushMsg("column specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2383:5: (colName= identifier colType ( KW_COMMENT comment= StringLiteral )? -> {containExcludedCharForCreateTableColumnName($colName.text)}? -> {$comment == null}? ^( TOK_TABCOL $colName colType ) -> ^( TOK_TABCOL $colName colType $comment) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2383:7: colName= identifier colType ( KW_COMMENT comment= StringLiteral )?
			{
			pushFollow(FOLLOW_identifier_in_columnNameType14744);
			colName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(colName.getTree());
			pushFollow(FOLLOW_colType_in_columnNameType14746);
			colType883=colType();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_colType.add(colType883.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2383:34: ( KW_COMMENT comment= StringLiteral )?
			int alt269=2;
			int LA269_0 = input.LA(1);
			if ( (LA269_0==KW_COMMENT) ) {
				alt269=1;
			}
			switch (alt269) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2383:35: KW_COMMENT comment= StringLiteral
					{
					KW_COMMENT884=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_columnNameType14749); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_COMMENT.add(KW_COMMENT884);

					comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_columnNameType14753); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(comment);

					}
					break;

			}

			// AST REWRITE
			// elements: colType, comment, colName, colName, colType
			// token labels: comment
			// rule labels: colName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
			RewriteRuleSubtreeStream stream_colName=new RewriteRuleSubtreeStream(adaptor,"rule colName",colName!=null?colName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2384:5: -> {containExcludedCharForCreateTableColumnName($colName.text)}?
			if (containExcludedCharForCreateTableColumnName((colName!=null?input.toString(colName.start,colName.stop):null))) {
				adaptor.addChild(root_0, throwColumnNameException());
			}

			else // 2385:5: -> {$comment == null}? ^( TOK_TABCOL $colName colType )
			if (comment == null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2385:28: ^( TOK_TABCOL $colName colType )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOL, "TOK_TABCOL"), root_1);
				adaptor.addChild(root_1, stream_colName.nextTree());
				adaptor.addChild(root_1, stream_colType.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2386:5: -> ^( TOK_TABCOL $colName colType $comment)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2386:28: ^( TOK_TABCOL $colName colType $comment)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOL, "TOK_TABCOL"), root_1);
				adaptor.addChild(root_1, stream_colName.nextTree());
				adaptor.addChild(root_1, stream_colType.nextTree());
				adaptor.addChild(root_1, stream_comment.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "columnNameType"


	public static class columnNameTypeOrConstraint_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnNameTypeOrConstraint"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2389:1: columnNameTypeOrConstraint : ( ( tableConstraint ) | ( columnNameTypeConstraint ) );
	public final HiveParser.columnNameTypeOrConstraint_return columnNameTypeOrConstraint() throws RecognitionException {
		HiveParser.columnNameTypeOrConstraint_return retval = new HiveParser.columnNameTypeOrConstraint_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope tableConstraint885 =null;
		ParserRuleReturnScope columnNameTypeConstraint886 =null;


		 pushMsg("column name or constraint", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2392:5: ( ( tableConstraint ) | ( columnNameTypeConstraint ) )
			int alt270=2;
			alt270 = dfa270.predict(input);
			switch (alt270) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2392:7: ( tableConstraint )
					{
					root_0 = (ASTNode)adaptor.nil();


					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2392:7: ( tableConstraint )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2392:9: tableConstraint
					{
					pushFollow(FOLLOW_tableConstraint_in_columnNameTypeOrConstraint14849);
					tableConstraint885=tableConstraint();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, tableConstraint885.getTree());

					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2393:7: ( columnNameTypeConstraint )
					{
					root_0 = (ASTNode)adaptor.nil();


					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2393:7: ( columnNameTypeConstraint )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2393:9: columnNameTypeConstraint
					{
					pushFollow(FOLLOW_columnNameTypeConstraint_in_columnNameTypeOrConstraint14861);
					columnNameTypeConstraint886=columnNameTypeConstraint();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, columnNameTypeConstraint886.getTree());

					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "columnNameTypeOrConstraint"


	public static class tableConstraint_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tableConstraint"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2396:1: tableConstraint : ( ( createForeignKey ) | ( createConstraint ) );
	public final HiveParser.tableConstraint_return tableConstraint() throws RecognitionException {
		HiveParser.tableConstraint_return retval = new HiveParser.tableConstraint_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope createForeignKey887 =null;
		ParserRuleReturnScope createConstraint888 =null;


		 pushMsg("table constraint", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2399:5: ( ( createForeignKey ) | ( createConstraint ) )
			int alt271=2;
			switch ( input.LA(1) ) {
			case KW_CONSTRAINT:
				{
				int LA271_1 = input.LA(2);
				if ( (LA271_1==Identifier) ) {
					int LA271_6 = input.LA(3);
					if ( (LA271_6==KW_FOREIGN) ) {
						alt271=1;
					}
					else if ( (LA271_6==KW_CHECK||LA271_6==KW_PRIMARY||LA271_6==KW_UNIQUE) ) {
						alt271=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						int nvaeMark = input.mark();
						try {
							for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
								input.consume();
							}
							NoViableAltException nvae =
								new NoViableAltException("", 271, 6, input);
							throw nvae;
						} finally {
							input.rewind(nvaeMark);
						}
					}

				}
				else if ( ((LA271_1 >= KW_ABORT && LA271_1 <= KW_AFTER)||LA271_1==KW_ALLOC_FRACTION||LA271_1==KW_ANALYZE||LA271_1==KW_ARCHIVE||LA271_1==KW_ASC||(LA271_1 >= KW_AUTOCOMMIT && LA271_1 <= KW_BEFORE)||(LA271_1 >= KW_BUCKET && LA271_1 <= KW_BUCKETS)||(LA271_1 >= KW_CACHE && LA271_1 <= KW_CASCADE)||LA271_1==KW_CHANGE||(LA271_1 >= KW_CHECK && LA271_1 <= KW_COLLECTION)||(LA271_1 >= KW_COLUMNS && LA271_1 <= KW_COMMENT)||(LA271_1 >= KW_COMPACT && LA271_1 <= KW_CONCATENATE)||LA271_1==KW_CONTINUE||LA271_1==KW_DATA||LA271_1==KW_DATABASES||(LA271_1 >= KW_DATETIME && LA271_1 <= KW_DBPROPERTIES)||(LA271_1 >= KW_DEFAULT && LA271_1 <= KW_DEFINED)||(LA271_1 >= KW_DELIMITED && LA271_1 <= KW_DESC)||(LA271_1 >= KW_DETAIL && LA271_1 <= KW_DISABLE)||(LA271_1 >= KW_DISTRIBUTE && LA271_1 <= KW_DO)||LA271_1==KW_DOW||(LA271_1 >= KW_DUMP && LA271_1 <= KW_ELEM_TYPE)||LA271_1==KW_ENABLE||(LA271_1 >= KW_ENFORCED && LA271_1 <= KW_ESCAPED)||LA271_1==KW_EXCLUSIVE||(LA271_1 >= KW_EXPLAIN && LA271_1 <= KW_EXPRESSION)||(LA271_1 >= KW_FIELDS && LA271_1 <= KW_FIRST)||(LA271_1 >= KW_FORMAT && LA271_1 <= KW_FORMATTED)||LA271_1==KW_FUNCTIONS||(LA271_1 >= KW_HOUR && LA271_1 <= KW_IDXPROPERTIES)||(LA271_1 >= KW_INDEX && LA271_1 <= KW_INDEXES)||(LA271_1 >= KW_INPATH && LA271_1 <= KW_INPUTFORMAT)||(LA271_1 >= KW_ISOLATION && LA271_1 <= KW_JAR)||(LA271_1 >= KW_KEY && LA271_1 <= KW_LAST)||LA271_1==KW_LEVEL||(LA271_1 >= KW_LIMIT && LA271_1 <= KW_LOAD)||(LA271_1 >= KW_LOCATION && LA271_1 <= KW_LONG)||LA271_1==KW_MANAGEMENT||(LA271_1 >= KW_MAPJOIN && LA271_1 <= KW_MATERIALIZED)||LA271_1==KW_METADATA||(LA271_1 >= KW_MINUTE && LA271_1 <= KW_MONTH)||(LA271_1 >= KW_MOVE && LA271_1 <= KW_MSCK)||(LA271_1 >= KW_NORELY && LA271_1 <= KW_NOSCAN)||LA271_1==KW_NOVALIDATE||LA271_1==KW_NULLS||LA271_1==KW_OFFSET||(LA271_1 >= KW_OPERATOR && LA271_1 <= KW_OPTION)||(LA271_1 >= KW_OUTPUTDRIVER && LA271_1 <= KW_OUTPUTFORMAT)||(LA271_1 >= KW_OVERWRITE && LA271_1 <= KW_OWNER)||(LA271_1 >= KW_PARTITIONED && LA271_1 <= KW_PATH)||(LA271_1 >= KW_PLAN && LA271_1 <= KW_POOL)||LA271_1==KW_PRINCIPALS||(LA271_1 >= KW_PURGE && LA271_1 <= KW_QUERY_PARALLELISM)||LA271_1==KW_READ||(LA271_1 >= KW_REBUILD && LA271_1 <= KW_RECORDWRITER)||(LA271_1 >= KW_RELOAD && LA271_1 <= KW_RESTRICT)||LA271_1==KW_REWRITE||(LA271_1 >= KW_ROLE && LA271_1 <= KW_ROLES)||(LA271_1 >= KW_SCHEDULING_POLICY && LA271_1 <= KW_SECOND)||(LA271_1 >= KW_SEMI && LA271_1 <= KW_SERVER)||(LA271_1 >= KW_SETS && LA271_1 <= KW_SKEWED)||(LA271_1 >= KW_SNAPSHOT && LA271_1 <= KW_SSL)||(LA271_1 >= KW_STATISTICS && LA271_1 <= KW_SUMMARY)||LA271_1==KW_TABLES||(LA271_1 >= KW_TBLPROPERTIES && LA271_1 <= KW_TERMINATED)||LA271_1==KW_TINYINT||(LA271_1 >= KW_TOUCH && LA271_1 <= KW_TRANSACTIONS)||LA271_1==KW_UNARCHIVE||LA271_1==KW_UNDO||LA271_1==KW_UNIONTYPE||(LA271_1 >= KW_UNLOCK && LA271_1 <= KW_UNSIGNED)||(LA271_1 >= KW_URI && LA271_1 <= KW_USE)||(LA271_1 >= KW_UTC && LA271_1 <= KW_VALIDATE)||LA271_1==KW_VALUE_TYPE||(LA271_1 >= KW_VECTORIZATION && LA271_1 <= KW_WEEK)||LA271_1==KW_WHILE||(LA271_1 >= KW_WORK && LA271_1 <= KW_ZONE)||LA271_1==KW_BATCH||LA271_1==KW_DAYOFWEEK||LA271_1==KW_HOLD_DDLTIME||LA271_1==KW_IGNORE||LA271_1==KW_NO_DROP||LA271_1==KW_OFFLINE||LA271_1==KW_PROTECTION||LA271_1==KW_READONLY||LA271_1==KW_TIMESTAMPTZ) ) {
					int LA271_7 = input.LA(3);
					if ( (LA271_7==KW_FOREIGN) ) {
						alt271=1;
					}
					else if ( (LA271_7==KW_CHECK||LA271_7==KW_PRIMARY||LA271_7==KW_UNIQUE) ) {
						alt271=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						int nvaeMark = input.mark();
						try {
							for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
								input.consume();
							}
							NoViableAltException nvae =
								new NoViableAltException("", 271, 7, input);
							throw nvae;
						} finally {
							input.rewind(nvaeMark);
						}
					}

				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 271, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_FOREIGN:
				{
				alt271=1;
				}
				break;
			case KW_CHECK:
			case KW_PRIMARY:
			case KW_UNIQUE:
				{
				alt271=2;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 271, 0, input);
				throw nvae;
			}
			switch (alt271) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2399:7: ( createForeignKey )
					{
					root_0 = (ASTNode)adaptor.nil();


					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2399:7: ( createForeignKey )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2399:9: createForeignKey
					{
					pushFollow(FOLLOW_createForeignKey_in_tableConstraint14892);
					createForeignKey887=createForeignKey();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, createForeignKey887.getTree());

					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2400:7: ( createConstraint )
					{
					root_0 = (ASTNode)adaptor.nil();


					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2400:7: ( createConstraint )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2400:9: createConstraint
					{
					pushFollow(FOLLOW_createConstraint_in_tableConstraint14904);
					createConstraint888=createConstraint();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, createConstraint888.getTree());

					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "tableConstraint"


	public static class columnNameTypeConstraint_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnNameTypeConstraint"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2403:1: columnNameTypeConstraint : colName= identifier colType ( columnConstraint[$colName.tree] )? ( KW_COMMENT comment= StringLiteral )? -> {containExcludedCharForCreateTableColumnName($colName.text)}? -> ^( TOK_TABCOL $colName colType ( $comment)? ( columnConstraint )? ) ;
	public final HiveParser.columnNameTypeConstraint_return columnNameTypeConstraint() throws RecognitionException {
		HiveParser.columnNameTypeConstraint_return retval = new HiveParser.columnNameTypeConstraint_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token comment=null;
		Token KW_COMMENT891=null;
		ParserRuleReturnScope colName =null;
		ParserRuleReturnScope colType889 =null;
		ParserRuleReturnScope columnConstraint890 =null;

		ASTNode comment_tree=null;
		ASTNode KW_COMMENT891_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_columnConstraint=new RewriteRuleSubtreeStream(adaptor,"rule columnConstraint");
		RewriteRuleSubtreeStream stream_colType=new RewriteRuleSubtreeStream(adaptor,"rule colType");

		 pushMsg("column specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2406:5: (colName= identifier colType ( columnConstraint[$colName.tree] )? ( KW_COMMENT comment= StringLiteral )? -> {containExcludedCharForCreateTableColumnName($colName.text)}? -> ^( TOK_TABCOL $colName colType ( $comment)? ( columnConstraint )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2406:7: colName= identifier colType ( columnConstraint[$colName.tree] )? ( KW_COMMENT comment= StringLiteral )?
			{
			pushFollow(FOLLOW_identifier_in_columnNameTypeConstraint14935);
			colName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(colName.getTree());
			pushFollow(FOLLOW_colType_in_columnNameTypeConstraint14937);
			colType889=colType();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_colType.add(colType889.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2406:34: ( columnConstraint[$colName.tree] )?
			int alt272=2;
			int LA272_0 = input.LA(1);
			if ( (LA272_0==KW_CHECK||LA272_0==KW_CONSTRAINT||LA272_0==KW_DEFAULT||LA272_0==KW_NOT||LA272_0==KW_PRIMARY||LA272_0==KW_REFERENCES||LA272_0==KW_UNIQUE) ) {
				alt272=1;
			}
			switch (alt272) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2406:34: columnConstraint[$colName.tree]
					{
					pushFollow(FOLLOW_columnConstraint_in_columnNameTypeConstraint14939);
					columnConstraint890=columnConstraint((colName!=null?((ASTNode)colName.getTree()):null));
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_columnConstraint.add(columnConstraint890.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2406:67: ( KW_COMMENT comment= StringLiteral )?
			int alt273=2;
			int LA273_0 = input.LA(1);
			if ( (LA273_0==KW_COMMENT) ) {
				alt273=1;
			}
			switch (alt273) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2406:68: KW_COMMENT comment= StringLiteral
					{
					KW_COMMENT891=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_columnNameTypeConstraint14944); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_COMMENT.add(KW_COMMENT891);

					comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_columnNameTypeConstraint14948); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(comment);

					}
					break;

			}

			// AST REWRITE
			// elements: columnConstraint, colName, comment, colType
			// token labels: comment
			// rule labels: colName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
			RewriteRuleSubtreeStream stream_colName=new RewriteRuleSubtreeStream(adaptor,"rule colName",colName!=null?colName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2407:5: -> {containExcludedCharForCreateTableColumnName($colName.text)}?
			if (containExcludedCharForCreateTableColumnName((colName!=null?input.toString(colName.start,colName.stop):null))) {
				adaptor.addChild(root_0, throwColumnNameException());
			}

			else // 2408:5: -> ^( TOK_TABCOL $colName colType ( $comment)? ( columnConstraint )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2408:8: ^( TOK_TABCOL $colName colType ( $comment)? ( columnConstraint )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOL, "TOK_TABCOL"), root_1);
				adaptor.addChild(root_1, stream_colName.nextTree());
				adaptor.addChild(root_1, stream_colType.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2408:39: ( $comment)?
				if ( stream_comment.hasNext() ) {
					adaptor.addChild(root_1, stream_comment.nextNode());
				}
				stream_comment.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2408:48: ( columnConstraint )?
				if ( stream_columnConstraint.hasNext() ) {
					adaptor.addChild(root_1, stream_columnConstraint.nextTree());
				}
				stream_columnConstraint.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "columnNameTypeConstraint"


	public static class columnConstraint_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnConstraint"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2411:1: columnConstraint[CommonTree fkColName] : ( ( foreignKeyConstraint[$fkColName] ) | ( colConstraint ) );
	public final HiveParser.columnConstraint_return columnConstraint(CommonTree fkColName) throws RecognitionException {
		HiveParser.columnConstraint_return retval = new HiveParser.columnConstraint_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope foreignKeyConstraint892 =null;
		ParserRuleReturnScope colConstraint893 =null;


		 pushMsg("column constraint", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2414:5: ( ( foreignKeyConstraint[$fkColName] ) | ( colConstraint ) )
			int alt274=2;
			switch ( input.LA(1) ) {
			case KW_CONSTRAINT:
				{
				int LA274_1 = input.LA(2);
				if ( (LA274_1==Identifier) ) {
					int LA274_8 = input.LA(3);
					if ( (LA274_8==KW_REFERENCES) ) {
						alt274=1;
					}
					else if ( (LA274_8==KW_CHECK||LA274_8==KW_DEFAULT||LA274_8==KW_NOT||LA274_8==KW_PRIMARY||LA274_8==KW_UNIQUE) ) {
						alt274=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						int nvaeMark = input.mark();
						try {
							for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
								input.consume();
							}
							NoViableAltException nvae =
								new NoViableAltException("", 274, 8, input);
							throw nvae;
						} finally {
							input.rewind(nvaeMark);
						}
					}

				}
				else if ( ((LA274_1 >= KW_ABORT && LA274_1 <= KW_AFTER)||LA274_1==KW_ALLOC_FRACTION||LA274_1==KW_ANALYZE||LA274_1==KW_ARCHIVE||LA274_1==KW_ASC||(LA274_1 >= KW_AUTOCOMMIT && LA274_1 <= KW_BEFORE)||(LA274_1 >= KW_BUCKET && LA274_1 <= KW_BUCKETS)||(LA274_1 >= KW_CACHE && LA274_1 <= KW_CASCADE)||LA274_1==KW_CHANGE||(LA274_1 >= KW_CHECK && LA274_1 <= KW_COLLECTION)||(LA274_1 >= KW_COLUMNS && LA274_1 <= KW_COMMENT)||(LA274_1 >= KW_COMPACT && LA274_1 <= KW_CONCATENATE)||LA274_1==KW_CONTINUE||LA274_1==KW_DATA||LA274_1==KW_DATABASES||(LA274_1 >= KW_DATETIME && LA274_1 <= KW_DBPROPERTIES)||(LA274_1 >= KW_DEFAULT && LA274_1 <= KW_DEFINED)||(LA274_1 >= KW_DELIMITED && LA274_1 <= KW_DESC)||(LA274_1 >= KW_DETAIL && LA274_1 <= KW_DISABLE)||(LA274_1 >= KW_DISTRIBUTE && LA274_1 <= KW_DO)||LA274_1==KW_DOW||(LA274_1 >= KW_DUMP && LA274_1 <= KW_ELEM_TYPE)||LA274_1==KW_ENABLE||(LA274_1 >= KW_ENFORCED && LA274_1 <= KW_ESCAPED)||LA274_1==KW_EXCLUSIVE||(LA274_1 >= KW_EXPLAIN && LA274_1 <= KW_EXPRESSION)||(LA274_1 >= KW_FIELDS && LA274_1 <= KW_FIRST)||(LA274_1 >= KW_FORMAT && LA274_1 <= KW_FORMATTED)||LA274_1==KW_FUNCTIONS||(LA274_1 >= KW_HOUR && LA274_1 <= KW_IDXPROPERTIES)||(LA274_1 >= KW_INDEX && LA274_1 <= KW_INDEXES)||(LA274_1 >= KW_INPATH && LA274_1 <= KW_INPUTFORMAT)||(LA274_1 >= KW_ISOLATION && LA274_1 <= KW_JAR)||(LA274_1 >= KW_KEY && LA274_1 <= KW_LAST)||LA274_1==KW_LEVEL||(LA274_1 >= KW_LIMIT && LA274_1 <= KW_LOAD)||(LA274_1 >= KW_LOCATION && LA274_1 <= KW_LONG)||LA274_1==KW_MANAGEMENT||(LA274_1 >= KW_MAPJOIN && LA274_1 <= KW_MATERIALIZED)||LA274_1==KW_METADATA||(LA274_1 >= KW_MINUTE && LA274_1 <= KW_MONTH)||(LA274_1 >= KW_MOVE && LA274_1 <= KW_MSCK)||(LA274_1 >= KW_NORELY && LA274_1 <= KW_NOSCAN)||LA274_1==KW_NOVALIDATE||LA274_1==KW_NULLS||LA274_1==KW_OFFSET||(LA274_1 >= KW_OPERATOR && LA274_1 <= KW_OPTION)||(LA274_1 >= KW_OUTPUTDRIVER && LA274_1 <= KW_OUTPUTFORMAT)||(LA274_1 >= KW_OVERWRITE && LA274_1 <= KW_OWNER)||(LA274_1 >= KW_PARTITIONED && LA274_1 <= KW_PATH)||(LA274_1 >= KW_PLAN && LA274_1 <= KW_POOL)||LA274_1==KW_PRINCIPALS||(LA274_1 >= KW_PURGE && LA274_1 <= KW_QUERY_PARALLELISM)||LA274_1==KW_READ||(LA274_1 >= KW_REBUILD && LA274_1 <= KW_RECORDWRITER)||(LA274_1 >= KW_RELOAD && LA274_1 <= KW_RESTRICT)||LA274_1==KW_REWRITE||(LA274_1 >= KW_ROLE && LA274_1 <= KW_ROLES)||(LA274_1 >= KW_SCHEDULING_POLICY && LA274_1 <= KW_SECOND)||(LA274_1 >= KW_SEMI && LA274_1 <= KW_SERVER)||(LA274_1 >= KW_SETS && LA274_1 <= KW_SKEWED)||(LA274_1 >= KW_SNAPSHOT && LA274_1 <= KW_SSL)||(LA274_1 >= KW_STATISTICS && LA274_1 <= KW_SUMMARY)||LA274_1==KW_TABLES||(LA274_1 >= KW_TBLPROPERTIES && LA274_1 <= KW_TERMINATED)||LA274_1==KW_TINYINT||(LA274_1 >= KW_TOUCH && LA274_1 <= KW_TRANSACTIONS)||LA274_1==KW_UNARCHIVE||LA274_1==KW_UNDO||LA274_1==KW_UNIONTYPE||(LA274_1 >= KW_UNLOCK && LA274_1 <= KW_UNSIGNED)||(LA274_1 >= KW_URI && LA274_1 <= KW_USE)||(LA274_1 >= KW_UTC && LA274_1 <= KW_VALIDATE)||LA274_1==KW_VALUE_TYPE||(LA274_1 >= KW_VECTORIZATION && LA274_1 <= KW_WEEK)||LA274_1==KW_WHILE||(LA274_1 >= KW_WORK && LA274_1 <= KW_ZONE)||LA274_1==KW_BATCH||LA274_1==KW_DAYOFWEEK||LA274_1==KW_HOLD_DDLTIME||LA274_1==KW_IGNORE||LA274_1==KW_NO_DROP||LA274_1==KW_OFFLINE||LA274_1==KW_PROTECTION||LA274_1==KW_READONLY||LA274_1==KW_TIMESTAMPTZ) ) {
					int LA274_9 = input.LA(3);
					if ( (LA274_9==KW_REFERENCES) ) {
						alt274=1;
					}
					else if ( (LA274_9==KW_CHECK||LA274_9==KW_DEFAULT||LA274_9==KW_NOT||LA274_9==KW_PRIMARY||LA274_9==KW_UNIQUE) ) {
						alt274=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						int nvaeMark = input.mark();
						try {
							for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
								input.consume();
							}
							NoViableAltException nvae =
								new NoViableAltException("", 274, 9, input);
							throw nvae;
						} finally {
							input.rewind(nvaeMark);
						}
					}

				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 274, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_REFERENCES:
				{
				alt274=1;
				}
				break;
			case KW_CHECK:
			case KW_DEFAULT:
			case KW_NOT:
			case KW_PRIMARY:
			case KW_UNIQUE:
				{
				alt274=2;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 274, 0, input);
				throw nvae;
			}
			switch (alt274) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2414:7: ( foreignKeyConstraint[$fkColName] )
					{
					root_0 = (ASTNode)adaptor.nil();


					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2414:7: ( foreignKeyConstraint[$fkColName] )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2414:9: foreignKeyConstraint[$fkColName]
					{
					pushFollow(FOLLOW_foreignKeyConstraint_in_columnConstraint15012);
					foreignKeyConstraint892=foreignKeyConstraint(fkColName);
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, foreignKeyConstraint892.getTree());

					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2415:7: ( colConstraint )
					{
					root_0 = (ASTNode)adaptor.nil();


					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2415:7: ( colConstraint )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2415:9: colConstraint
					{
					pushFollow(FOLLOW_colConstraint_in_columnConstraint15025);
					colConstraint893=colConstraint();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, colConstraint893.getTree());

					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "columnConstraint"


	public static class foreignKeyConstraint_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "foreignKeyConstraint"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2418:1: foreignKeyConstraint[CommonTree fkColName] : ( KW_CONSTRAINT constraintName= identifier )? KW_REFERENCES tabName= tableName LPAREN colName= columnName RPAREN ( constraintOptsCreate )? -> {$constraintName.tree != null}? ^( TOK_FOREIGN_KEY ^( TOK_CONSTRAINT_NAME $constraintName) ^( TOK_TABCOLNAME ) $tabName ^( TOK_TABCOLNAME $colName) ( constraintOptsCreate )? ) -> ^( TOK_FOREIGN_KEY ^( TOK_TABCOLNAME ) $tabName ^( TOK_TABCOLNAME $colName) ( constraintOptsCreate )? ) ;
	public final HiveParser.foreignKeyConstraint_return foreignKeyConstraint(CommonTree fkColName) throws RecognitionException {
		HiveParser.foreignKeyConstraint_return retval = new HiveParser.foreignKeyConstraint_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_CONSTRAINT894=null;
		Token KW_REFERENCES895=null;
		Token LPAREN896=null;
		Token RPAREN897=null;
		ParserRuleReturnScope constraintName =null;
		ParserRuleReturnScope tabName =null;
		ParserRuleReturnScope colName =null;
		ParserRuleReturnScope constraintOptsCreate898 =null;

		ASTNode KW_CONSTRAINT894_tree=null;
		ASTNode KW_REFERENCES895_tree=null;
		ASTNode LPAREN896_tree=null;
		ASTNode RPAREN897_tree=null;
		RewriteRuleTokenStream stream_KW_CONSTRAINT=new RewriteRuleTokenStream(adaptor,"token KW_CONSTRAINT");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleTokenStream stream_KW_REFERENCES=new RewriteRuleTokenStream(adaptor,"token KW_REFERENCES");
		RewriteRuleSubtreeStream stream_constraintOptsCreate=new RewriteRuleSubtreeStream(adaptor,"rule constraintOptsCreate");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
		RewriteRuleSubtreeStream stream_columnName=new RewriteRuleSubtreeStream(adaptor,"rule columnName");

		 pushMsg("column constraint", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2421:5: ( ( KW_CONSTRAINT constraintName= identifier )? KW_REFERENCES tabName= tableName LPAREN colName= columnName RPAREN ( constraintOptsCreate )? -> {$constraintName.tree != null}? ^( TOK_FOREIGN_KEY ^( TOK_CONSTRAINT_NAME $constraintName) ^( TOK_TABCOLNAME ) $tabName ^( TOK_TABCOLNAME $colName) ( constraintOptsCreate )? ) -> ^( TOK_FOREIGN_KEY ^( TOK_TABCOLNAME ) $tabName ^( TOK_TABCOLNAME $colName) ( constraintOptsCreate )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2421:7: ( KW_CONSTRAINT constraintName= identifier )? KW_REFERENCES tabName= tableName LPAREN colName= columnName RPAREN ( constraintOptsCreate )?
			{
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2421:7: ( KW_CONSTRAINT constraintName= identifier )?
			int alt275=2;
			int LA275_0 = input.LA(1);
			if ( (LA275_0==KW_CONSTRAINT) ) {
				alt275=1;
			}
			switch (alt275) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2421:8: KW_CONSTRAINT constraintName= identifier
					{
					KW_CONSTRAINT894=(Token)match(input,KW_CONSTRAINT,FOLLOW_KW_CONSTRAINT_in_foreignKeyConstraint15056); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_CONSTRAINT.add(KW_CONSTRAINT894);

					pushFollow(FOLLOW_identifier_in_foreignKeyConstraint15060);
					constraintName=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(constraintName.getTree());
					}
					break;

			}

			KW_REFERENCES895=(Token)match(input,KW_REFERENCES,FOLLOW_KW_REFERENCES_in_foreignKeyConstraint15064); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_REFERENCES.add(KW_REFERENCES895);

			pushFollow(FOLLOW_tableName_in_foreignKeyConstraint15068);
			tabName=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(tabName.getTree());
			LPAREN896=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_foreignKeyConstraint15070); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN896);

			pushFollow(FOLLOW_columnName_in_foreignKeyConstraint15074);
			colName=columnName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnName.add(colName.getTree());
			RPAREN897=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_foreignKeyConstraint15076); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN897);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2421:115: ( constraintOptsCreate )?
			int alt276=2;
			int LA276_0 = input.LA(1);
			if ( (LA276_0==KW_DISABLE||LA276_0==KW_ENABLE||LA276_0==KW_ENFORCED||LA276_0==KW_NOT) ) {
				alt276=1;
			}
			switch (alt276) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2421:115: constraintOptsCreate
					{
					pushFollow(FOLLOW_constraintOptsCreate_in_foreignKeyConstraint15078);
					constraintOptsCreate898=constraintOptsCreate();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_constraintOptsCreate.add(constraintOptsCreate898.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: colName, constraintOptsCreate, tabName, colName, constraintName, tabName, constraintOptsCreate
			// token labels: 
			// rule labels: colName, tabName, constraintName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_colName=new RewriteRuleSubtreeStream(adaptor,"rule colName",colName!=null?colName.getTree():null);
			RewriteRuleSubtreeStream stream_tabName=new RewriteRuleSubtreeStream(adaptor,"rule tabName",tabName!=null?tabName.getTree():null);
			RewriteRuleSubtreeStream stream_constraintName=new RewriteRuleSubtreeStream(adaptor,"rule constraintName",constraintName!=null?constraintName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2422:5: -> {$constraintName.tree != null}? ^( TOK_FOREIGN_KEY ^( TOK_CONSTRAINT_NAME $constraintName) ^( TOK_TABCOLNAME ) $tabName ^( TOK_TABCOLNAME $colName) ( constraintOptsCreate )? )
			if ((constraintName!=null?((ASTNode)constraintName.getTree()):null) != null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2423:13: ^( TOK_FOREIGN_KEY ^( TOK_CONSTRAINT_NAME $constraintName) ^( TOK_TABCOLNAME ) $tabName ^( TOK_TABCOLNAME $colName) ( constraintOptsCreate )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FOREIGN_KEY, "TOK_FOREIGN_KEY"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2423:31: ^( TOK_CONSTRAINT_NAME $constraintName)
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CONSTRAINT_NAME, "TOK_CONSTRAINT_NAME"), root_2);
				adaptor.addChild(root_2, stream_constraintName.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2423:70: ^( TOK_TABCOLNAME )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLNAME, "TOK_TABCOLNAME"), root_2);
				adaptor.addChild(root_2, fkColName);
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_1, stream_tabName.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2423:110: ^( TOK_TABCOLNAME $colName)
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLNAME, "TOK_TABCOLNAME"), root_2);
				adaptor.addChild(root_2, stream_colName.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2423:137: ( constraintOptsCreate )?
				if ( stream_constraintOptsCreate.hasNext() ) {
					adaptor.addChild(root_1, stream_constraintOptsCreate.nextTree());
				}
				stream_constraintOptsCreate.reset();

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2424:5: -> ^( TOK_FOREIGN_KEY ^( TOK_TABCOLNAME ) $tabName ^( TOK_TABCOLNAME $colName) ( constraintOptsCreate )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2424:8: ^( TOK_FOREIGN_KEY ^( TOK_TABCOLNAME ) $tabName ^( TOK_TABCOLNAME $colName) ( constraintOptsCreate )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FOREIGN_KEY, "TOK_FOREIGN_KEY"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2424:26: ^( TOK_TABCOLNAME )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLNAME, "TOK_TABCOLNAME"), root_2);
				adaptor.addChild(root_2, fkColName);
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_1, stream_tabName.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2424:66: ^( TOK_TABCOLNAME $colName)
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLNAME, "TOK_TABCOLNAME"), root_2);
				adaptor.addChild(root_2, stream_colName.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2424:93: ( constraintOptsCreate )?
				if ( stream_constraintOptsCreate.hasNext() ) {
					adaptor.addChild(root_1, stream_constraintOptsCreate.nextTree());
				}
				stream_constraintOptsCreate.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "foreignKeyConstraint"


	public static class colConstraint_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "colConstraint"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2427:1: colConstraint : ( KW_CONSTRAINT constraintName= identifier )? columnConstraintType ( constraintOptsCreate )? -> {$constraintName.tree != null}? ^( ^( TOK_CONSTRAINT_NAME $constraintName) ( constraintOptsCreate )? ) -> ^( ( constraintOptsCreate )? ) ;
	public final HiveParser.colConstraint_return colConstraint() throws RecognitionException {
		HiveParser.colConstraint_return retval = new HiveParser.colConstraint_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_CONSTRAINT899=null;
		ParserRuleReturnScope constraintName =null;
		ParserRuleReturnScope columnConstraintType900 =null;
		ParserRuleReturnScope constraintOptsCreate901 =null;

		ASTNode KW_CONSTRAINT899_tree=null;
		RewriteRuleTokenStream stream_KW_CONSTRAINT=new RewriteRuleTokenStream(adaptor,"token KW_CONSTRAINT");
		RewriteRuleSubtreeStream stream_constraintOptsCreate=new RewriteRuleSubtreeStream(adaptor,"rule constraintOptsCreate");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_columnConstraintType=new RewriteRuleSubtreeStream(adaptor,"rule columnConstraintType");

		 pushMsg("column constraint", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2430:5: ( ( KW_CONSTRAINT constraintName= identifier )? columnConstraintType ( constraintOptsCreate )? -> {$constraintName.tree != null}? ^( ^( TOK_CONSTRAINT_NAME $constraintName) ( constraintOptsCreate )? ) -> ^( ( constraintOptsCreate )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2430:7: ( KW_CONSTRAINT constraintName= identifier )? columnConstraintType ( constraintOptsCreate )?
			{
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2430:7: ( KW_CONSTRAINT constraintName= identifier )?
			int alt277=2;
			int LA277_0 = input.LA(1);
			if ( (LA277_0==KW_CONSTRAINT) ) {
				alt277=1;
			}
			switch (alt277) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2430:8: KW_CONSTRAINT constraintName= identifier
					{
					KW_CONSTRAINT899=(Token)match(input,KW_CONSTRAINT,FOLLOW_KW_CONSTRAINT_in_colConstraint15186); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_CONSTRAINT.add(KW_CONSTRAINT899);

					pushFollow(FOLLOW_identifier_in_colConstraint15190);
					constraintName=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(constraintName.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_columnConstraintType_in_colConstraint15194);
			columnConstraintType900=columnConstraintType();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnConstraintType.add(columnConstraintType900.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2430:71: ( constraintOptsCreate )?
			int alt278=2;
			int LA278_0 = input.LA(1);
			if ( (LA278_0==KW_DISABLE||LA278_0==KW_ENABLE||LA278_0==KW_ENFORCED||LA278_0==KW_NOT) ) {
				alt278=1;
			}
			switch (alt278) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2430:71: constraintOptsCreate
					{
					pushFollow(FOLLOW_constraintOptsCreate_in_colConstraint15196);
					constraintOptsCreate901=constraintOptsCreate();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_constraintOptsCreate.add(constraintOptsCreate901.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: constraintOptsCreate, constraintName, constraintOptsCreate
			// token labels: 
			// rule labels: constraintName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_constraintName=new RewriteRuleSubtreeStream(adaptor,"rule constraintName",constraintName!=null?constraintName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2431:5: -> {$constraintName.tree != null}? ^( ^( TOK_CONSTRAINT_NAME $constraintName) ( constraintOptsCreate )? )
			if ((constraintName!=null?((ASTNode)constraintName.getTree()):null) != null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2432:13: ^( ^( TOK_CONSTRAINT_NAME $constraintName) ( constraintOptsCreate )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((columnConstraintType900!=null?((ASTNode)columnConstraintType900.getTree()):null), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2432:44: ^( TOK_CONSTRAINT_NAME $constraintName)
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CONSTRAINT_NAME, "TOK_CONSTRAINT_NAME"), root_2);
				adaptor.addChild(root_2, stream_constraintName.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2432:83: ( constraintOptsCreate )?
				if ( stream_constraintOptsCreate.hasNext() ) {
					adaptor.addChild(root_1, stream_constraintOptsCreate.nextTree());
				}
				stream_constraintOptsCreate.reset();

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2433:5: -> ^( ( constraintOptsCreate )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2433:8: ^( ( constraintOptsCreate )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((columnConstraintType900!=null?((ASTNode)columnConstraintType900.getTree()):null), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2433:39: ( constraintOptsCreate )?
				if ( stream_constraintOptsCreate.hasNext() ) {
					adaptor.addChild(root_1, stream_constraintOptsCreate.nextTree());
				}
				stream_constraintOptsCreate.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "colConstraint"


	public static class alterColumnConstraint_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterColumnConstraint"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2436:1: alterColumnConstraint[CommonTree fkColName] : ( ( alterForeignKeyConstraint[$fkColName] ) | ( alterColConstraint ) );
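	// NOTE (hand-added, illustrative): dispatches between a foreign-key constraint and
	// a plain column constraint by looking ahead for KW_REFERENCES, optionally behind
	// KW_CONSTRAINT <identifier>. Inputs inferred from the grammar above:
	//   CONSTRAINT fk1 REFERENCES parent_tbl (id)   -- alternative 1
	//   CONSTRAINT nn1 NOT NULL                     -- alternative 2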
	public final HiveParser.alterColumnConstraint_return alterColumnConstraint(CommonTree fkColName) throws RecognitionException {
		HiveParser.alterColumnConstraint_return retval = new HiveParser.alterColumnConstraint_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope alterForeignKeyConstraint902 =null;
		ParserRuleReturnScope alterColConstraint903 =null;


		 pushMsg("alter column constraint", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2439:5: ( ( alterForeignKeyConstraint[$fkColName] ) | ( alterColConstraint ) )
			int alt279=2;
			switch ( input.LA(1) ) {
			case KW_CONSTRAINT:
				{
				int LA279_1 = input.LA(2);
				if ( (LA279_1==Identifier) ) {
					int LA279_8 = input.LA(3);
					if ( (LA279_8==KW_REFERENCES) ) {
						alt279=1;
					}
					else if ( (LA279_8==KW_CHECK||LA279_8==KW_DEFAULT||LA279_8==KW_NOT||LA279_8==KW_PRIMARY||LA279_8==KW_UNIQUE) ) {
						alt279=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						int nvaeMark = input.mark();
						try {
							for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
								input.consume();
							}
							NoViableAltException nvae =
								new NoViableAltException("", 279, 8, input);
							throw nvae;
						} finally {
							input.rewind(nvaeMark);
						}
					}

				}
				else if ( ((LA279_1 >= KW_ABORT && LA279_1 <= KW_AFTER)||LA279_1==KW_ALLOC_FRACTION||LA279_1==KW_ANALYZE||LA279_1==KW_ARCHIVE||LA279_1==KW_ASC||(LA279_1 >= KW_AUTOCOMMIT && LA279_1 <= KW_BEFORE)||(LA279_1 >= KW_BUCKET && LA279_1 <= KW_BUCKETS)||(LA279_1 >= KW_CACHE && LA279_1 <= KW_CASCADE)||LA279_1==KW_CHANGE||(LA279_1 >= KW_CHECK && LA279_1 <= KW_COLLECTION)||(LA279_1 >= KW_COLUMNS && LA279_1 <= KW_COMMENT)||(LA279_1 >= KW_COMPACT && LA279_1 <= KW_CONCATENATE)||LA279_1==KW_CONTINUE||LA279_1==KW_DATA||LA279_1==KW_DATABASES||(LA279_1 >= KW_DATETIME && LA279_1 <= KW_DBPROPERTIES)||(LA279_1 >= KW_DEFAULT && LA279_1 <= KW_DEFINED)||(LA279_1 >= KW_DELIMITED && LA279_1 <= KW_DESC)||(LA279_1 >= KW_DETAIL && LA279_1 <= KW_DISABLE)||(LA279_1 >= KW_DISTRIBUTE && LA279_1 <= KW_DO)||LA279_1==KW_DOW||(LA279_1 >= KW_DUMP && LA279_1 <= KW_ELEM_TYPE)||LA279_1==KW_ENABLE||(LA279_1 >= KW_ENFORCED && LA279_1 <= KW_ESCAPED)||LA279_1==KW_EXCLUSIVE||(LA279_1 >= KW_EXPLAIN && LA279_1 <= KW_EXPRESSION)||(LA279_1 >= KW_FIELDS && LA279_1 <= KW_FIRST)||(LA279_1 >= KW_FORMAT && LA279_1 <= KW_FORMATTED)||LA279_1==KW_FUNCTIONS||(LA279_1 >= KW_HOUR && LA279_1 <= KW_IDXPROPERTIES)||(LA279_1 >= KW_INDEX && LA279_1 <= KW_INDEXES)||(LA279_1 >= KW_INPATH && LA279_1 <= KW_INPUTFORMAT)||(LA279_1 >= KW_ISOLATION && LA279_1 <= KW_JAR)||(LA279_1 >= KW_KEY && LA279_1 <= KW_LAST)||LA279_1==KW_LEVEL||(LA279_1 >= KW_LIMIT && LA279_1 <= KW_LOAD)||(LA279_1 >= KW_LOCATION && LA279_1 <= KW_LONG)||LA279_1==KW_MANAGEMENT||(LA279_1 >= KW_MAPJOIN && LA279_1 <= KW_MATERIALIZED)||LA279_1==KW_METADATA||(LA279_1 >= KW_MINUTE && LA279_1 <= KW_MONTH)||(LA279_1 >= KW_MOVE && LA279_1 <= KW_MSCK)||(LA279_1 >= KW_NORELY && LA279_1 <= KW_NOSCAN)||LA279_1==KW_NOVALIDATE||LA279_1==KW_NULLS||LA279_1==KW_OFFSET||(LA279_1 >= KW_OPERATOR && LA279_1 <= KW_OPTION)||(LA279_1 >= KW_OUTPUTDRIVER && LA279_1 <= KW_OUTPUTFORMAT)||(LA279_1 >= KW_OVERWRITE && LA279_1 <= KW_OWNER)||(LA279_1 >= KW_PARTITIONED && LA279_1 <= KW_PATH)||(LA279_1 >= KW_PLAN && LA279_1 <= KW_POOL)||LA279_1==KW_PRINCIPALS||(LA279_1 >= KW_PURGE && LA279_1 <= KW_QUERY_PARALLELISM)||LA279_1==KW_READ||(LA279_1 >= KW_REBUILD && LA279_1 <= KW_RECORDWRITER)||(LA279_1 >= KW_RELOAD && LA279_1 <= KW_RESTRICT)||LA279_1==KW_REWRITE||(LA279_1 >= KW_ROLE && LA279_1 <= KW_ROLES)||(LA279_1 >= KW_SCHEDULING_POLICY && LA279_1 <= KW_SECOND)||(LA279_1 >= KW_SEMI && LA279_1 <= KW_SERVER)||(LA279_1 >= KW_SETS && LA279_1 <= KW_SKEWED)||(LA279_1 >= KW_SNAPSHOT && LA279_1 <= KW_SSL)||(LA279_1 >= KW_STATISTICS && LA279_1 <= KW_SUMMARY)||LA279_1==KW_TABLES||(LA279_1 >= KW_TBLPROPERTIES && LA279_1 <= KW_TERMINATED)||LA279_1==KW_TINYINT||(LA279_1 >= KW_TOUCH && LA279_1 <= KW_TRANSACTIONS)||LA279_1==KW_UNARCHIVE||LA279_1==KW_UNDO||LA279_1==KW_UNIONTYPE||(LA279_1 >= KW_UNLOCK && LA279_1 <= KW_UNSIGNED)||(LA279_1 >= KW_URI && LA279_1 <= KW_USE)||(LA279_1 >= KW_UTC && LA279_1 <= KW_VALIDATE)||LA279_1==KW_VALUE_TYPE||(LA279_1 >= KW_VECTORIZATION && LA279_1 <= KW_WEEK)||LA279_1==KW_WHILE||(LA279_1 >= KW_WORK && LA279_1 <= KW_ZONE)||LA279_1==KW_BATCH||LA279_1==KW_DAYOFWEEK||LA279_1==KW_HOLD_DDLTIME||LA279_1==KW_IGNORE||LA279_1==KW_NO_DROP||LA279_1==KW_OFFLINE||LA279_1==KW_PROTECTION||LA279_1==KW_READONLY||LA279_1==KW_TIMESTAMPTZ) ) {
					int LA279_9 = input.LA(3);
					if ( (LA279_9==KW_REFERENCES) ) {
						alt279=1;
					}
					else if ( (LA279_9==KW_CHECK||LA279_9==KW_DEFAULT||LA279_9==KW_NOT||LA279_9==KW_PRIMARY||LA279_9==KW_UNIQUE) ) {
						alt279=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						int nvaeMark = input.mark();
						try {
							for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
								input.consume();
							}
							NoViableAltException nvae =
								new NoViableAltException("", 279, 9, input);
							throw nvae;
						} finally {
							input.rewind(nvaeMark);
						}
					}

				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 279, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_REFERENCES:
				{
				alt279=1;
				}
				break;
			case KW_CHECK:
			case KW_DEFAULT:
			case KW_NOT:
			case KW_PRIMARY:
			case KW_UNIQUE:
				{
				alt279=2;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 279, 0, input);
				throw nvae;
			}
			switch (alt279) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2439:7: ( alterForeignKeyConstraint[$fkColName] )
					{
					root_0 = (ASTNode)adaptor.nil();


					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2439:7: ( alterForeignKeyConstraint[$fkColName] )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2439:9: alterForeignKeyConstraint[$fkColName]
					{
					pushFollow(FOLLOW_alterForeignKeyConstraint_in_alterColumnConstraint15274);
					alterForeignKeyConstraint902=alterForeignKeyConstraint(fkColName);
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterForeignKeyConstraint902.getTree());

					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2440:7: ( alterColConstraint )
					{
					root_0 = (ASTNode)adaptor.nil();


					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2440:7: ( alterColConstraint )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2440:9: alterColConstraint
					{
					pushFollow(FOLLOW_alterColConstraint_in_alterColumnConstraint15287);
					alterColConstraint903=alterColConstraint();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, alterColConstraint903.getTree());

					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterColumnConstraint"


	public static class alterForeignKeyConstraint_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterForeignKeyConstraint"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2443:1: alterForeignKeyConstraint[CommonTree fkColName] : ( KW_CONSTRAINT constraintName= identifier )? KW_REFERENCES tabName= tableName LPAREN colName= columnName RPAREN ( constraintOptsAlter )? -> {$constraintName.tree != null}? ^( TOK_FOREIGN_KEY ^( TOK_CONSTRAINT_NAME $constraintName) ^( TOK_TABCOLNAME ) $tabName ^( TOK_TABCOLNAME $colName) ( constraintOptsAlter )? ) -> ^( TOK_FOREIGN_KEY ^( TOK_TABCOLNAME ) $tabName ^( TOK_TABCOLNAME $colName) ( constraintOptsAlter )? ) ;
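	// NOTE (hand-added, illustrative): parses the REFERENCES clause of an ALTER-time
	// foreign-key constraint and rewrites it to a TOK_FOREIGN_KEY subtree. The child
	// column (fkColName) is supplied by the caller and wrapped in TOK_TABCOLNAME.
	// Example input inferred from the grammar above:
	//   CONSTRAINT fk1 REFERENCES parent_tbl (id) DISABLE NOVALIDATE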
	public final HiveParser.alterForeignKeyConstraint_return alterForeignKeyConstraint(CommonTree fkColName) throws RecognitionException {
		HiveParser.alterForeignKeyConstraint_return retval = new HiveParser.alterForeignKeyConstraint_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_CONSTRAINT904=null;
		Token KW_REFERENCES905=null;
		Token LPAREN906=null;
		Token RPAREN907=null;
		ParserRuleReturnScope constraintName =null;
		ParserRuleReturnScope tabName =null;
		ParserRuleReturnScope colName =null;
		ParserRuleReturnScope constraintOptsAlter908 =null;

		ASTNode KW_CONSTRAINT904_tree=null;
		ASTNode KW_REFERENCES905_tree=null;
		ASTNode LPAREN906_tree=null;
		ASTNode RPAREN907_tree=null;
		RewriteRuleTokenStream stream_KW_CONSTRAINT=new RewriteRuleTokenStream(adaptor,"token KW_CONSTRAINT");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleTokenStream stream_KW_REFERENCES=new RewriteRuleTokenStream(adaptor,"token KW_REFERENCES");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_constraintOptsAlter=new RewriteRuleSubtreeStream(adaptor,"rule constraintOptsAlter");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
		RewriteRuleSubtreeStream stream_columnName=new RewriteRuleSubtreeStream(adaptor,"rule columnName");

		 pushMsg("alter foreign key constraint", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2446:5: ( ( KW_CONSTRAINT constraintName= identifier )? KW_REFERENCES tabName= tableName LPAREN colName= columnName RPAREN ( constraintOptsAlter )? -> {$constraintName.tree != null}? ^( TOK_FOREIGN_KEY ^( TOK_CONSTRAINT_NAME $constraintName) ^( TOK_TABCOLNAME ) $tabName ^( TOK_TABCOLNAME $colName) ( constraintOptsAlter )? ) -> ^( TOK_FOREIGN_KEY ^( TOK_TABCOLNAME ) $tabName ^( TOK_TABCOLNAME $colName) ( constraintOptsAlter )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2446:7: ( KW_CONSTRAINT constraintName= identifier )? KW_REFERENCES tabName= tableName LPAREN colName= columnName RPAREN ( constraintOptsAlter )?
			{
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2446:7: ( KW_CONSTRAINT constraintName= identifier )?
			int alt280=2;
			int LA280_0 = input.LA(1);
			if ( (LA280_0==KW_CONSTRAINT) ) {
				alt280=1;
			}
			switch (alt280) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2446:8: KW_CONSTRAINT constraintName= identifier
					{
					KW_CONSTRAINT904=(Token)match(input,KW_CONSTRAINT,FOLLOW_KW_CONSTRAINT_in_alterForeignKeyConstraint15318); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_CONSTRAINT.add(KW_CONSTRAINT904);

					pushFollow(FOLLOW_identifier_in_alterForeignKeyConstraint15322);
					constraintName=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(constraintName.getTree());
					}
					break;

			}

			KW_REFERENCES905=(Token)match(input,KW_REFERENCES,FOLLOW_KW_REFERENCES_in_alterForeignKeyConstraint15326); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_REFERENCES.add(KW_REFERENCES905);

			pushFollow(FOLLOW_tableName_in_alterForeignKeyConstraint15330);
			tabName=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(tabName.getTree());
			LPAREN906=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_alterForeignKeyConstraint15332); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN906);

			pushFollow(FOLLOW_columnName_in_alterForeignKeyConstraint15336);
			colName=columnName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnName.add(colName.getTree());
			RPAREN907=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_alterForeignKeyConstraint15338); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN907);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2446:115: ( constraintOptsAlter )?
			int alt281=2;
			int LA281_0 = input.LA(1);
			if ( (LA281_0==KW_DISABLE||LA281_0==KW_ENABLE||LA281_0==KW_ENFORCED||LA281_0==KW_NOT) ) {
				alt281=1;
			}
			switch (alt281) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2446:115: constraintOptsAlter
					{
					pushFollow(FOLLOW_constraintOptsAlter_in_alterForeignKeyConstraint15340);
					constraintOptsAlter908=constraintOptsAlter();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_constraintOptsAlter.add(constraintOptsAlter908.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: constraintOptsAlter, tabName, colName, colName, constraintName, tabName, constraintOptsAlter
			// token labels: 
			// rule labels: tabName, colName, constraintName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_tabName=new RewriteRuleSubtreeStream(adaptor,"rule tabName",tabName!=null?tabName.getTree():null);
			RewriteRuleSubtreeStream stream_colName=new RewriteRuleSubtreeStream(adaptor,"rule colName",colName!=null?colName.getTree():null);
			RewriteRuleSubtreeStream stream_constraintName=new RewriteRuleSubtreeStream(adaptor,"rule constraintName",constraintName!=null?constraintName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2447:5: -> {$constraintName.tree != null}? ^( TOK_FOREIGN_KEY ^( TOK_CONSTRAINT_NAME $constraintName) ^( TOK_TABCOLNAME ) $tabName ^( TOK_TABCOLNAME $colName) ( constraintOptsAlter )? )
			if ((constraintName!=null?((ASTNode)constraintName.getTree()):null) != null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2448:13: ^( TOK_FOREIGN_KEY ^( TOK_CONSTRAINT_NAME $constraintName) ^( TOK_TABCOLNAME ) $tabName ^( TOK_TABCOLNAME $colName) ( constraintOptsAlter )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FOREIGN_KEY, "TOK_FOREIGN_KEY"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2448:31: ^( TOK_CONSTRAINT_NAME $constraintName)
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CONSTRAINT_NAME, "TOK_CONSTRAINT_NAME"), root_2);
				adaptor.addChild(root_2, stream_constraintName.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2448:70: ^( TOK_TABCOLNAME )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLNAME, "TOK_TABCOLNAME"), root_2);
				adaptor.addChild(root_2, fkColName);
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_1, stream_tabName.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2448:110: ^( TOK_TABCOLNAME $colName)
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLNAME, "TOK_TABCOLNAME"), root_2);
				adaptor.addChild(root_2, stream_colName.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2448:137: ( constraintOptsAlter )?
				if ( stream_constraintOptsAlter.hasNext() ) {
					adaptor.addChild(root_1, stream_constraintOptsAlter.nextTree());
				}
				stream_constraintOptsAlter.reset();

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2449:5: -> ^( TOK_FOREIGN_KEY ^( TOK_TABCOLNAME ) $tabName ^( TOK_TABCOLNAME $colName) ( constraintOptsAlter )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2449:8: ^( TOK_FOREIGN_KEY ^( TOK_TABCOLNAME ) $tabName ^( TOK_TABCOLNAME $colName) ( constraintOptsAlter )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FOREIGN_KEY, "TOK_FOREIGN_KEY"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2449:26: ^( TOK_TABCOLNAME )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLNAME, "TOK_TABCOLNAME"), root_2);
				adaptor.addChild(root_2, fkColName);
				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_1, stream_tabName.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2449:66: ^( TOK_TABCOLNAME $colName)
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOLNAME, "TOK_TABCOLNAME"), root_2);
				adaptor.addChild(root_2, stream_colName.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2449:93: ( constraintOptsAlter )?
				if ( stream_constraintOptsAlter.hasNext() ) {
					adaptor.addChild(root_1, stream_constraintOptsAlter.nextTree());
				}
				stream_constraintOptsAlter.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterForeignKeyConstraint"


	public static class alterColConstraint_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "alterColConstraint"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2452:1: alterColConstraint : ( KW_CONSTRAINT constraintName= identifier )? columnConstraintType ( constraintOptsAlter )? -> {$constraintName.tree != null}? ^( ^( TOK_CONSTRAINT_NAME $constraintName) ( constraintOptsAlter )? ) -> ^( ( constraintOptsAlter )? ) ;
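	// NOTE (hand-added, illustrative): an optional CONSTRAINT name, a constraint type,
	// and optional alter-time options; the rewrite roots the tree at the node returned
	// by columnConstraintType. Example input inferred from the grammar above:
	//   CONSTRAINT nn1 NOT NULL ENABLE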
	public final HiveParser.alterColConstraint_return alterColConstraint() throws RecognitionException {
		HiveParser.alterColConstraint_return retval = new HiveParser.alterColConstraint_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_CONSTRAINT909=null;
		ParserRuleReturnScope constraintName =null;
		ParserRuleReturnScope columnConstraintType910 =null;
		ParserRuleReturnScope constraintOptsAlter911 =null;

		ASTNode KW_CONSTRAINT909_tree=null;
		RewriteRuleTokenStream stream_KW_CONSTRAINT=new RewriteRuleTokenStream(adaptor,"token KW_CONSTRAINT");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_columnConstraintType=new RewriteRuleSubtreeStream(adaptor,"rule columnConstraintType");
		RewriteRuleSubtreeStream stream_constraintOptsAlter=new RewriteRuleSubtreeStream(adaptor,"rule constraintOptsAlter");

		 pushMsg("alter column constraint", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2455:5: ( ( KW_CONSTRAINT constraintName= identifier )? columnConstraintType ( constraintOptsAlter )? -> {$constraintName.tree != null}? ^( ^( TOK_CONSTRAINT_NAME $constraintName) ( constraintOptsAlter )? ) -> ^( ( constraintOptsAlter )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2455:7: ( KW_CONSTRAINT constraintName= identifier )? columnConstraintType ( constraintOptsAlter )?
			{
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2455:7: ( KW_CONSTRAINT constraintName= identifier )?
			int alt282=2;
			int LA282_0 = input.LA(1);
			if ( (LA282_0==KW_CONSTRAINT) ) {
				alt282=1;
			}
			switch (alt282) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2455:8: KW_CONSTRAINT constraintName= identifier
					{
					KW_CONSTRAINT909=(Token)match(input,KW_CONSTRAINT,FOLLOW_KW_CONSTRAINT_in_alterColConstraint15448); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_CONSTRAINT.add(KW_CONSTRAINT909);

					pushFollow(FOLLOW_identifier_in_alterColConstraint15452);
					constraintName=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(constraintName.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_columnConstraintType_in_alterColConstraint15456);
			columnConstraintType910=columnConstraintType();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnConstraintType.add(columnConstraintType910.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2455:71: ( constraintOptsAlter )?
			int alt283=2;
			int LA283_0 = input.LA(1);
			if ( (LA283_0==KW_DISABLE||LA283_0==KW_ENABLE||LA283_0==KW_ENFORCED||LA283_0==KW_NOT) ) {
				alt283=1;
			}
			switch (alt283) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2455:71: constraintOptsAlter
					{
					pushFollow(FOLLOW_constraintOptsAlter_in_alterColConstraint15458);
					constraintOptsAlter911=constraintOptsAlter();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_constraintOptsAlter.add(constraintOptsAlter911.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: constraintName, constraintOptsAlter, constraintOptsAlter
			// token labels: 
			// rule labels: constraintName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_constraintName=new RewriteRuleSubtreeStream(adaptor,"rule constraintName",constraintName!=null?constraintName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2456:5: -> {$constraintName.tree != null}? ^( ^( TOK_CONSTRAINT_NAME $constraintName) ( constraintOptsAlter )? )
			if ((constraintName!=null?((ASTNode)constraintName.getTree()):null) != null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2457:13: ^( ^( TOK_CONSTRAINT_NAME $constraintName) ( constraintOptsAlter )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((columnConstraintType910!=null?((ASTNode)columnConstraintType910.getTree()):null), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2457:44: ^( TOK_CONSTRAINT_NAME $constraintName)
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CONSTRAINT_NAME, "TOK_CONSTRAINT_NAME"), root_2);
				adaptor.addChild(root_2, stream_constraintName.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2457:83: ( constraintOptsAlter )?
				if ( stream_constraintOptsAlter.hasNext() ) {
					adaptor.addChild(root_1, stream_constraintOptsAlter.nextTree());
				}
				stream_constraintOptsAlter.reset();

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2458:5: -> ^( ( constraintOptsAlter )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2458:8: ^( ( constraintOptsAlter )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((columnConstraintType910!=null?((ASTNode)columnConstraintType910.getTree()):null), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2458:39: ( constraintOptsAlter )?
				if ( stream_constraintOptsAlter.hasNext() ) {
					adaptor.addChild(root_1, stream_constraintOptsAlter.nextTree());
				}
				stream_constraintOptsAlter.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "alterColConstraint"


	public static class columnConstraintType_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnConstraintType"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2461:1: columnConstraintType : ( KW_NOT KW_NULL -> TOK_NOT_NULL | KW_DEFAULT defaultVal -> ^( TOK_DEFAULT_VALUE defaultVal ) | checkConstraint | tableConstraintType );
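	// NOTE (hand-added, illustrative): maps the four constraint forms to AST tokens:
	//   NOT NULL             -> TOK_NOT_NULL
	//   DEFAULT <defaultVal> -> ^( TOK_DEFAULT_VALUE defaultVal )
	//   CHECK ...            -> delegated to checkConstraint
	//   PRIMARY KEY | UNIQUE -> delegated to tableConstraintType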
	public final HiveParser.columnConstraintType_return columnConstraintType() throws RecognitionException {
		HiveParser.columnConstraintType_return retval = new HiveParser.columnConstraintType_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_NOT912=null;
		Token KW_NULL913=null;
		Token KW_DEFAULT914=null;
		ParserRuleReturnScope defaultVal915 =null;
		ParserRuleReturnScope checkConstraint916 =null;
		ParserRuleReturnScope tableConstraintType917 =null;

		ASTNode KW_NOT912_tree=null;
		ASTNode KW_NULL913_tree=null;
		ASTNode KW_DEFAULT914_tree=null;
		RewriteRuleTokenStream stream_KW_NOT=new RewriteRuleTokenStream(adaptor,"token KW_NOT");
		RewriteRuleTokenStream stream_KW_NULL=new RewriteRuleTokenStream(adaptor,"token KW_NULL");
		RewriteRuleTokenStream stream_KW_DEFAULT=new RewriteRuleTokenStream(adaptor,"token KW_DEFAULT");
		RewriteRuleSubtreeStream stream_defaultVal=new RewriteRuleSubtreeStream(adaptor,"rule defaultVal");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2462:5: ( KW_NOT KW_NULL -> TOK_NOT_NULL | KW_DEFAULT defaultVal -> ^( TOK_DEFAULT_VALUE defaultVal ) | checkConstraint | tableConstraintType )
			int alt284=4;
			switch ( input.LA(1) ) {
			case KW_NOT:
				{
				alt284=1;
				}
				break;
			case KW_DEFAULT:
				{
				alt284=2;
				}
				break;
			case KW_CHECK:
				{
				alt284=3;
				}
				break;
			case KW_PRIMARY:
			case KW_UNIQUE:
				{
				alt284=4;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 284, 0, input);
				throw nvae;
			}
			switch (alt284) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2462:7: KW_NOT KW_NULL
					{
					KW_NOT912=(Token)match(input,KW_NOT,FOLLOW_KW_NOT_in_columnConstraintType15523); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_NOT.add(KW_NOT912);

					KW_NULL913=(Token)match(input,KW_NULL,FOLLOW_KW_NULL_in_columnConstraintType15525); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_NULL.add(KW_NULL913);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2462:28: -> TOK_NOT_NULL
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_NOT_NULL, "TOK_NOT_NULL"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2463:7: KW_DEFAULT defaultVal
					{
					KW_DEFAULT914=(Token)match(input,KW_DEFAULT,FOLLOW_KW_DEFAULT_in_columnConstraintType15546); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DEFAULT.add(KW_DEFAULT914);

					pushFollow(FOLLOW_defaultVal_in_columnConstraintType15548);
					defaultVal915=defaultVal();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_defaultVal.add(defaultVal915.getTree());
					// AST REWRITE
					// elements: defaultVal
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2463:28: -> ^( TOK_DEFAULT_VALUE defaultVal )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2463:34: ^( TOK_DEFAULT_VALUE defaultVal )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DEFAULT_VALUE, "TOK_DEFAULT_VALUE"), root_1);
						adaptor.addChild(root_1, stream_defaultVal.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2464:7: checkConstraint
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_checkConstraint_in_columnConstraintType15566);
					checkConstraint916=checkConstraint();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, checkConstraint916.getTree());

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2465:7: tableConstraintType
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_tableConstraintType_in_columnConstraintType15574);
					tableConstraintType917=tableConstraintType();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, tableConstraintType917.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "columnConstraintType"


	public static class defaultVal_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "defaultVal"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2468:1: defaultVal : ( constant | function | castExpression );
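	// NOTE (hand-added, illustrative): a DEFAULT value is a constant, a function call,
	// or a CAST expression. Ambiguous leading tokens (DATE, TIMESTAMP, CURRENT_DATE,
	// CURRENT_TIMESTAMP) are resolved with one extra token of lookahead: a following
	// LPAREN selects the function alternative, otherwise the constant one.
	// Example inputs inferred from the grammar: 0, 'x', CURRENT_TIMESTAMP(),
	// CAST('2018-01-01' AS DATE).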
	public final HiveParser.defaultVal_return defaultVal() throws RecognitionException {
		HiveParser.defaultVal_return retval = new HiveParser.defaultVal_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope constant918 =null;
		ParserRuleReturnScope function919 =null;
		ParserRuleReturnScope castExpression920 =null;


		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2469:5: ( constant | function | castExpression )
			int alt285=3;
			switch ( input.LA(1) ) {
			case CharSetName:
			case IntegralLiteral:
			case KW_FALSE:
			case KW_NULL:
			case KW_TIMESTAMPLOCALTZ:
			case KW_TRUE:
			case Number:
			case NumberLiteral:
			case StringLiteral:
				{
				alt285=1;
				}
				break;
			case KW_DATE:
				{
				int LA285_3 = input.LA(2);
				if ( (LA285_3==StringLiteral) ) {
					alt285=1;
				}
				else if ( (LA285_3==LPAREN) ) {
					alt285=2;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 285, 3, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_CURRENT_DATE:
				{
				int LA285_4 = input.LA(2);
				if ( (LA285_4==EOF||LA285_4==COMMA||LA285_4==KW_AFTER||LA285_4==KW_CASCADE||LA285_4==KW_COMMENT||LA285_4==KW_DISABLE||LA285_4==KW_ENABLE||LA285_4==KW_ENFORCED||LA285_4==KW_FIRST||LA285_4==KW_NOT||LA285_4==KW_RESTRICT||LA285_4==RPAREN) ) {
					alt285=1;
				}
				else if ( (LA285_4==LPAREN) ) {
					alt285=2;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 285, 4, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_TIMESTAMP:
				{
				int LA285_5 = input.LA(2);
				if ( (LA285_5==StringLiteral) ) {
					alt285=1;
				}
				else if ( (LA285_5==LPAREN) ) {
					alt285=2;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 285, 5, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_CURRENT_TIMESTAMP:
				{
				int LA285_6 = input.LA(2);
				if ( (LA285_6==EOF||LA285_6==COMMA||LA285_6==KW_AFTER||LA285_6==KW_CASCADE||LA285_6==KW_COMMENT||LA285_6==KW_DISABLE||LA285_6==KW_ENABLE||LA285_6==KW_ENFORCED||LA285_6==KW_FIRST||LA285_6==KW_NOT||LA285_6==KW_RESTRICT||LA285_6==RPAREN) ) {
					alt285=1;
				}
				else if ( (LA285_6==LPAREN) ) {
					alt285=2;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 285, 6, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case Identifier:
			case KW_ABORT:
			case KW_ACTIVATE:
			case KW_ACTIVE:
			case KW_ADD:
			case KW_ADMIN:
			case KW_AFTER:
			case KW_ALLOC_FRACTION:
			case KW_ANALYZE:
			case KW_ARCHIVE:
			case KW_ARRAY:
			case KW_ASC:
			case KW_AUTOCOMMIT:
			case KW_BEFORE:
			case KW_BIGINT:
			case KW_BINARY:
			case KW_BOOLEAN:
			case KW_BUCKET:
			case KW_BUCKETS:
			case KW_CACHE:
			case KW_CASCADE:
			case KW_CHANGE:
			case KW_CHECK:
			case KW_CLUSTER:
			case KW_CLUSTERED:
			case KW_CLUSTERSTATUS:
			case KW_COLLECTION:
			case KW_COLUMNS:
			case KW_COMMENT:
			case KW_COMPACT:
			case KW_COMPACTIONS:
			case KW_COMPUTE:
			case KW_CONCATENATE:
			case KW_CONTINUE:
			case KW_DATA:
			case KW_DATABASES:
			case KW_DATETIME:
			case KW_DAY:
			case KW_DBPROPERTIES:
			case KW_DEFAULT:
			case KW_DEFERRED:
			case KW_DEFINED:
			case KW_DELIMITED:
			case KW_DEPENDENCY:
			case KW_DESC:
			case KW_DETAIL:
			case KW_DIRECTORIES:
			case KW_DIRECTORY:
			case KW_DISABLE:
			case KW_DISTRIBUTE:
			case KW_DO:
			case KW_DOUBLE:
			case KW_DOW:
			case KW_DUMP:
			case KW_ELEM_TYPE:
			case KW_ENABLE:
			case KW_ENFORCED:
			case KW_ESCAPED:
			case KW_EXCLUSIVE:
			case KW_EXPLAIN:
			case KW_EXPORT:
			case KW_EXPRESSION:
			case KW_FIELDS:
			case KW_FILE:
			case KW_FILEFORMAT:
			case KW_FIRST:
			case KW_FLOAT:
			case KW_FORMAT:
			case KW_FORMATTED:
			case KW_FUNCTIONS:
			case KW_GROUPING:
			case KW_HOUR:
			case KW_IDXPROPERTIES:
			case KW_IF:
			case KW_INDEX:
			case KW_INDEXES:
			case KW_INPATH:
			case KW_INPUTDRIVER:
			case KW_INPUTFORMAT:
			case KW_INT:
			case KW_ISOLATION:
			case KW_ITEMS:
			case KW_JAR:
			case KW_KEY:
			case KW_KEYS:
			case KW_KEY_TYPE:
			case KW_KILL:
			case KW_LAST:
			case KW_LEVEL:
			case KW_LIMIT:
			case KW_LINES:
			case KW_LOAD:
			case KW_LOCATION:
			case KW_LOCK:
			case KW_LOCKS:
			case KW_LOGICAL:
			case KW_LONG:
			case KW_MANAGEMENT:
			case KW_MAP:
			case KW_MAPJOIN:
			case KW_MAPPING:
			case KW_MATCHED:
			case KW_MATERIALIZED:
			case KW_METADATA:
			case KW_MINUTE:
			case KW_MONTH:
			case KW_MOVE:
			case KW_MSCK:
			case KW_NORELY:
			case KW_NOSCAN:
			case KW_NOVALIDATE:
			case KW_NULLS:
			case KW_OFFSET:
			case KW_OPERATOR:
			case KW_OPTION:
			case KW_OUTPUTDRIVER:
			case KW_OUTPUTFORMAT:
			case KW_OVERWRITE:
			case KW_OWNER:
			case KW_PARTITIONED:
			case KW_PARTITIONS:
			case KW_PATH:
			case KW_PLAN:
			case KW_PLANS:
			case KW_PLUS:
			case KW_POOL:
			case KW_PRINCIPALS:
			case KW_PURGE:
			case KW_QUARTER:
			case KW_QUERY:
			case KW_QUERY_PARALLELISM:
			case KW_READ:
			case KW_REBUILD:
			case KW_RECORDREADER:
			case KW_RECORDWRITER:
			case KW_RELOAD:
			case KW_RELY:
			case KW_RENAME:
			case KW_REOPTIMIZATION:
			case KW_REPAIR:
			case KW_REPL:
			case KW_REPLACE:
			case KW_REPLICATION:
			case KW_RESOURCE:
			case KW_RESTRICT:
			case KW_REWRITE:
			case KW_ROLE:
			case KW_ROLES:
			case KW_SCHEDULING_POLICY:
			case KW_SCHEMA:
			case KW_SCHEMAS:
			case KW_SECOND:
			case KW_SEMI:
			case KW_SERDE:
			case KW_SERDEPROPERTIES:
			case KW_SERVER:
			case KW_SETS:
			case KW_SHARED:
			case KW_SHOW:
			case KW_SHOW_DATABASE:
			case KW_SKEWED:
			case KW_SMALLINT:
			case KW_SNAPSHOT:
			case KW_SORT:
			case KW_SORTED:
			case KW_SSL:
			case KW_STATISTICS:
			case KW_STATUS:
			case KW_STORED:
			case KW_STREAMTABLE:
			case KW_STRING:
			case KW_STRUCT:
			case KW_SUMMARY:
			case KW_TABLES:
			case KW_TBLPROPERTIES:
			case KW_TEMPORARY:
			case KW_TERMINATED:
			case KW_TINYINT:
			case KW_TOUCH:
			case KW_TRANSACTION:
			case KW_TRANSACTIONS:
			case KW_UNARCHIVE:
			case KW_UNDO:
			case KW_UNIONTYPE:
			case KW_UNLOCK:
			case KW_UNMANAGED:
			case KW_UNSET:
			case KW_UNSIGNED:
			case KW_URI:
			case KW_USE:
			case KW_UTC:
			case KW_UTCTIMESTAMP:
			case KW_VALIDATE:
			case KW_VALUE_TYPE:
			case KW_VECTORIZATION:
			case KW_VIEW:
			case KW_VIEWS:
			case KW_WAIT:
			case KW_WEEK:
			case KW_WHILE:
			case KW_WORK:
			case KW_WORKLOAD:
			case KW_WRITE:
			case KW_YEAR:
			case KW_ZONE:
			case KW_BATCH:
			case KW_DAYOFWEEK:
			case KW_HOLD_DDLTIME:
			case KW_IGNORE:
			case KW_NO_DROP:
			case KW_OFFLINE:
			case KW_PROTECTION:
			case KW_READONLY:
			case KW_TIMESTAMPTZ:
				{
				alt285=2;
				}
				break;
			case KW_CAST:
				{
				alt285=3;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 285, 0, input);
				throw nvae;
			}
			switch (alt285) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2469:7: constant
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_constant_in_defaultVal15591);
					constant918=constant();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, constant918.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2470:7: function
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_function_in_defaultVal15599);
					function919=function();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, function919.getTree());

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2471:7: castExpression
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_castExpression_in_defaultVal15607);
					castExpression920=castExpression();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, castExpression920.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "defaultVal"


	public static class tableConstraintType_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "tableConstraintType"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2474:1: tableConstraintType : ( KW_PRIMARY KW_KEY -> TOK_PRIMARY_KEY | KW_UNIQUE -> TOK_UNIQUE );
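	// NOTE (hand-added, illustrative): PRIMARY KEY rewrites to the bare token
	// TOK_PRIMARY_KEY and UNIQUE to TOK_UNIQUE; neither carries children here.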
	public final HiveParser.tableConstraintType_return tableConstraintType() throws RecognitionException {
		HiveParser.tableConstraintType_return retval = new HiveParser.tableConstraintType_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_PRIMARY921=null;
		Token KW_KEY922=null;
		Token KW_UNIQUE923=null;

		ASTNode KW_PRIMARY921_tree=null;
		ASTNode KW_KEY922_tree=null;
		ASTNode KW_UNIQUE923_tree=null;
		RewriteRuleTokenStream stream_KW_PRIMARY=new RewriteRuleTokenStream(adaptor,"token KW_PRIMARY");
		RewriteRuleTokenStream stream_KW_UNIQUE=new RewriteRuleTokenStream(adaptor,"token KW_UNIQUE");
		RewriteRuleTokenStream stream_KW_KEY=new RewriteRuleTokenStream(adaptor,"token KW_KEY");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2475:5: ( KW_PRIMARY KW_KEY -> TOK_PRIMARY_KEY | KW_UNIQUE -> TOK_UNIQUE )
			int alt286=2;
			int LA286_0 = input.LA(1);
			if ( (LA286_0==KW_PRIMARY) ) {
				alt286=1;
			}
			else if ( (LA286_0==KW_UNIQUE) ) {
				alt286=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 286, 0, input);
				throw nvae;
			}

			switch (alt286) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2475:7: KW_PRIMARY KW_KEY
					{
					KW_PRIMARY921=(Token)match(input,KW_PRIMARY,FOLLOW_KW_PRIMARY_in_tableConstraintType15624); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_PRIMARY.add(KW_PRIMARY921);

					KW_KEY922=(Token)match(input,KW_KEY,FOLLOW_KW_KEY_in_tableConstraintType15626); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_KEY.add(KW_KEY922);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2475:28: -> TOK_PRIMARY_KEY
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_PRIMARY_KEY, "TOK_PRIMARY_KEY"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2476:7: KW_UNIQUE
					{
					KW_UNIQUE923=(Token)match(input,KW_UNIQUE,FOLLOW_KW_UNIQUE_in_tableConstraintType15644); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_UNIQUE.add(KW_UNIQUE923);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2476:28: -> TOK_UNIQUE
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_UNIQUE, "TOK_UNIQUE"));
					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "tableConstraintType"


	public static class constraintOptsCreate_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "constraintOptsCreate"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2479:1: constraintOptsCreate : enableValidateSpecification relySpecification ;
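	// NOTE (hand-added, illustrative): constraint options at CREATE time are an
	// enable/validate specification followed by a rely specification, e.g.
	// (inferred) ENABLE NOVALIDATE RELY.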
	public final HiveParser.constraintOptsCreate_return constraintOptsCreate() throws RecognitionException {
		HiveParser.constraintOptsCreate_return retval = new HiveParser.constraintOptsCreate_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope enableValidateSpecification924 =null;
		ParserRuleReturnScope relySpecification925 =null;


		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2480:5: ( enableValidateSpecification relySpecification )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2480:7: enableValidateSpecification relySpecification
			{
			root_0 = (ASTNode)adaptor.nil();


			pushFollow(FOLLOW_enableValidateSpecification_in_constraintOptsCreate15679);
			enableValidateSpecification924=enableValidateSpecification();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) adaptor.addChild(root_0, enableValidateSpecification924.getTree());

			pushFollow(FOLLOW_relySpecification_in_constraintOptsCreate15681);
			relySpecification925=relySpecification();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) adaptor.addChild(root_0, relySpecification925.getTree());

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "constraintOptsCreate"


	public static class constraintOptsAlter_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "constraintOptsAlter"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2483:1: constraintOptsAlter : enableValidateSpecification relySpecification ;
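	// NOTE (hand-added, illustrative): same shape as constraintOptsCreate
	// (enableValidateSpecification followed by relySpecification), generated as a
	// separate rule for the ALTER path.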
	public final HiveParser.constraintOptsAlter_return constraintOptsAlter() throws RecognitionException {
		HiveParser.constraintOptsAlter_return retval = new HiveParser.constraintOptsAlter_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope enableValidateSpecification926 =null;
		ParserRuleReturnScope relySpecification927 =null;


		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2484:5: ( enableValidateSpecification relySpecification )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2484:7: enableValidateSpecification relySpecification
			{
			root_0 = (ASTNode)adaptor.nil();


			pushFollow(FOLLOW_enableValidateSpecification_in_constraintOptsAlter15698);
			enableValidateSpecification926=enableValidateSpecification();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) adaptor.addChild(root_0, enableValidateSpecification926.getTree());

			pushFollow(FOLLOW_relySpecification_in_constraintOptsAlter15700);
			relySpecification927=relySpecification();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) adaptor.addChild(root_0, relySpecification927.getTree());

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "constraintOptsAlter"


	public static class columnNameColonType_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnNameColonType"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2487:1: columnNameColonType : colName= identifier COLON colType ( KW_COMMENT comment= StringLiteral )? -> {$comment == null}? ^( TOK_TABCOL $colName colType ) -> ^( TOK_TABCOL $colName colType $comment) ;
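	// NOTE (hand-added, illustrative): parses a name COLON type pair with an optional
	// COMMENT string (typically a struct field) and rewrites it to
	// ^( TOK_TABCOL colName colType [comment] ). Example input inferred from the
	// grammar above: id:int COMMENT 'primary id'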
	public final HiveParser.columnNameColonType_return columnNameColonType() throws RecognitionException {
		HiveParser.columnNameColonType_return retval = new HiveParser.columnNameColonType_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token comment=null;
		Token COLON928=null;
		Token KW_COMMENT930=null;
		ParserRuleReturnScope colName =null;
		ParserRuleReturnScope colType929 =null;

		ASTNode comment_tree=null;
		ASTNode COLON928_tree=null;
		ASTNode KW_COMMENT930_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_COLON=new RewriteRuleTokenStream(adaptor,"token COLON");
		RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_colType=new RewriteRuleSubtreeStream(adaptor,"rule colType");

		 pushMsg("column specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2490:5: (colName= identifier COLON colType ( KW_COMMENT comment= StringLiteral )? -> {$comment == null}? ^( TOK_TABCOL $colName colType ) -> ^( TOK_TABCOL $colName colType $comment) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2490:7: colName= identifier COLON colType ( KW_COMMENT comment= StringLiteral )?
			{
			pushFollow(FOLLOW_identifier_in_columnNameColonType15729);
			colName=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(colName.getTree());
			COLON928=(Token)match(input,COLON,FOLLOW_COLON_in_columnNameColonType15731); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_COLON.add(COLON928);

			pushFollow(FOLLOW_colType_in_columnNameColonType15733);
			colType929=colType();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_colType.add(colType929.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2490:40: ( KW_COMMENT comment= StringLiteral )?
			int alt287=2;
			int LA287_0 = input.LA(1);
			if ( (LA287_0==KW_COMMENT) ) {
				alt287=1;
			}
			switch (alt287) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2490:41: KW_COMMENT comment= StringLiteral
					{
					KW_COMMENT930=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_columnNameColonType15736); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_COMMENT.add(KW_COMMENT930);

					comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_columnNameColonType15740); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(comment);

					}
					break;

			}

			// AST REWRITE
			// elements: colType, colType, colName, colName, comment
			// token labels: comment
			// rule labels: colName, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
			RewriteRuleSubtreeStream stream_colName=new RewriteRuleSubtreeStream(adaptor,"rule colName",colName!=null?colName.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2491:5: -> {$comment == null}? ^( TOK_TABCOL $colName colType )
			if (comment == null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2491:28: ^( TOK_TABCOL $colName colType )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOL, "TOK_TABCOL"), root_1);
				adaptor.addChild(root_1, stream_colName.nextTree());
				adaptor.addChild(root_1, stream_colType.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2492:5: -> ^( TOK_TABCOL $colName colType $comment)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2492:28: ^( TOK_TABCOL $colName colType $comment)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABCOL, "TOK_TABCOL"), root_1);
				adaptor.addChild(root_1, stream_colName.nextTree());
				adaptor.addChild(root_1, stream_colType.nextTree());
				adaptor.addChild(root_1, stream_comment.nextNode());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "columnNameColonType"


	public static class colType_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "colType"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2495:1: colType : type ;
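	// NOTE (hand-added, illustrative): thin wrapper around the type rule; it exists so
	// the "column type" context message is pushed for error reporting.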
	public final HiveParser.colType_return colType() throws RecognitionException {
		HiveParser.colType_return retval = new HiveParser.colType_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope type931 =null;


		 pushMsg("column type", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2498:5: ( type )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2498:7: type
			{
			root_0 = (ASTNode)adaptor.nil();


			pushFollow(FOLLOW_type_in_colType15824);
			type931=type();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) adaptor.addChild(root_0, type931.getTree());

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "colType"


	public static class colTypeList_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "colTypeList"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2501:1: colTypeList : colType ( COMMA colType )* -> ^( TOK_COLTYPELIST ( colType )+ ) ;
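	// NOTE (hand-added, illustrative): a comma-separated list of column types rewritten
	// to ^( TOK_COLTYPELIST colType+ ). Example input inferred from the grammar:
	//   int, string, map<string,int>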
	public final HiveParser.colTypeList_return colTypeList() throws RecognitionException {
		HiveParser.colTypeList_return retval = new HiveParser.colTypeList_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token COMMA933=null;
		ParserRuleReturnScope colType932 =null;
		ParserRuleReturnScope colType934 =null;

		ASTNode COMMA933_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleSubtreeStream stream_colType=new RewriteRuleSubtreeStream(adaptor,"rule colType");

		 pushMsg("column type list", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2504:5: ( colType ( COMMA colType )* -> ^( TOK_COLTYPELIST ( colType )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2504:7: colType ( COMMA colType )*
			{
			pushFollow(FOLLOW_colType_in_colTypeList15851);
			colType932=colType();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_colType.add(colType932.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2504:15: ( COMMA colType )*
			loop288:
			while (true) {
				int alt288=2;
				int LA288_0 = input.LA(1);
				if ( (LA288_0==COMMA) ) {
					alt288=1;
				}

				switch (alt288) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2504:16: COMMA colType
					{
					COMMA933=(Token)match(input,COMMA,FOLLOW_COMMA_in_colTypeList15854); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA933);

					pushFollow(FOLLOW_colType_in_colTypeList15856);
					colType934=colType();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_colType.add(colType934.getTree());
					}
					break;

				default :
					break loop288;
				}
			}

			// AST REWRITE
			// elements: colType
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2504:32: -> ^( TOK_COLTYPELIST ( colType )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2504:35: ^( TOK_COLTYPELIST ( colType )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_COLTYPELIST, "TOK_COLTYPELIST"), root_1);
				if ( !(stream_colType.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_colType.hasNext() ) {
					adaptor.addChild(root_1, stream_colType.nextTree());
				}
				stream_colType.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "colTypeList"
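
	/*
	 * Illustrative note (hand-written, not ANTLR output): per the production
	 * above, a comma-separated list such as the hypothetical input
	 *
	 *     int,string,boolean
	 *
	 * is rewritten to ^(TOK_COLTYPELIST TOK_INT TOK_STRING TOK_BOOLEAN): the
	 * COMMA tokens are collected into stream_COMMA but never emitted, so only
	 * the element types survive in the tree.
	 */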


	public static class type_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "type"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2507:1: type : ( primitiveType | listType | structType | mapType | unionType );
	public final HiveParser.type_return type() throws RecognitionException {
		HiveParser.type_return retval = new HiveParser.type_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope primitiveType935 =null;
		ParserRuleReturnScope listType936 =null;
		ParserRuleReturnScope structType937 =null;
		ParserRuleReturnScope mapType938 =null;
		ParserRuleReturnScope unionType939 =null;


		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2508:5: ( primitiveType | listType | structType | mapType | unionType )
			int alt289=5;
			switch ( input.LA(1) ) {
			case KW_BIGINT:
			case KW_BINARY:
			case KW_BOOLEAN:
			case KW_CHAR:
			case KW_DATE:
			case KW_DATETIME:
			case KW_DECIMAL:
			case KW_DOUBLE:
			case KW_FLOAT:
			case KW_INT:
			case KW_SMALLINT:
			case KW_STRING:
			case KW_TIMESTAMP:
			case KW_TIMESTAMPLOCALTZ:
			case KW_TINYINT:
			case KW_VARCHAR:
				{
				alt289=1;
				}
				break;
			case KW_ARRAY:
				{
				alt289=2;
				}
				break;
			case KW_STRUCT:
				{
				alt289=3;
				}
				break;
			case KW_MAP:
				{
				alt289=4;
				}
				break;
			case KW_UNIONTYPE:
				{
				alt289=5;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 289, 0, input);
				throw nvae;
			}
			switch (alt289) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2508:7: primitiveType
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_primitiveType_in_type15884);
					primitiveType935=primitiveType();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, primitiveType935.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2509:7: listType
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_listType_in_type15892);
					listType936=listType();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, listType936.getTree());

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2510:7: structType
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_structType_in_type15900);
					structType937=structType();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, structType937.getTree());

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2511:7: mapType
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_mapType_in_type15908);
					mapType938=mapType();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, mapType938.getTree());

					}
					break;
				case 5 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2512:7: unionType
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_unionType_in_type15916);
					unionType939=unionType();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, unionType939.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "type"
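
	/*
	 * Illustrative note (hand-written, not ANTLR output): type predicts its
	 * alternative from a single token of lookahead: any primitive-type
	 * keyword (KW_TINYINT through KW_VARCHAR) selects primitiveType, while
	 * KW_ARRAY, KW_STRUCT, KW_MAP and KW_UNIONTYPE select the matching
	 * complex-type rule. There is no rewrite; the chosen sub-rule's tree is
	 * returned unchanged.
	 */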


	public static class primitiveType_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "primitiveType"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2514:1: primitiveType : ( KW_TINYINT -> TOK_TINYINT | KW_SMALLINT -> TOK_SMALLINT | KW_INT -> TOK_INT | KW_BIGINT -> TOK_BIGINT | KW_BOOLEAN -> TOK_BOOLEAN | KW_FLOAT -> TOK_FLOAT | KW_DOUBLE ( KW_PRECISION )? -> TOK_DOUBLE | KW_DATE -> TOK_DATE | KW_DATETIME -> TOK_DATETIME | KW_TIMESTAMP -> TOK_TIMESTAMP | KW_TIMESTAMPLOCALTZ -> TOK_TIMESTAMPLOCALTZ | KW_TIMESTAMP KW_WITH KW_LOCAL KW_TIME KW_ZONE -> TOK_TIMESTAMPLOCALTZ | KW_STRING -> TOK_STRING | KW_BINARY -> TOK_BINARY | KW_DECIMAL ( LPAREN prec= Number ( COMMA scale= Number )? RPAREN )? -> ^( TOK_DECIMAL ( $prec)? ( $scale)? ) | KW_VARCHAR LPAREN length= Number RPAREN -> ^( TOK_VARCHAR $length) | KW_CHAR LPAREN length= Number RPAREN -> ^( TOK_CHAR $length) );
	public final HiveParser.primitiveType_return primitiveType() throws RecognitionException {
		HiveParser.primitiveType_return retval = new HiveParser.primitiveType_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token prec=null;
		Token scale=null;
		Token length=null;
		Token KW_TINYINT940=null;
		Token KW_SMALLINT941=null;
		Token KW_INT942=null;
		Token KW_BIGINT943=null;
		Token KW_BOOLEAN944=null;
		Token KW_FLOAT945=null;
		Token KW_DOUBLE946=null;
		Token KW_PRECISION947=null;
		Token KW_DATE948=null;
		Token KW_DATETIME949=null;
		Token KW_TIMESTAMP950=null;
		Token KW_TIMESTAMPLOCALTZ951=null;
		Token KW_TIMESTAMP952=null;
		Token KW_WITH953=null;
		Token KW_LOCAL954=null;
		Token KW_TIME955=null;
		Token KW_ZONE956=null;
		Token KW_STRING957=null;
		Token KW_BINARY958=null;
		Token KW_DECIMAL959=null;
		Token LPAREN960=null;
		Token COMMA961=null;
		Token RPAREN962=null;
		Token KW_VARCHAR963=null;
		Token LPAREN964=null;
		Token RPAREN965=null;
		Token KW_CHAR966=null;
		Token LPAREN967=null;
		Token RPAREN968=null;

		ASTNode prec_tree=null;
		ASTNode scale_tree=null;
		ASTNode length_tree=null;
		ASTNode KW_TINYINT940_tree=null;
		ASTNode KW_SMALLINT941_tree=null;
		ASTNode KW_INT942_tree=null;
		ASTNode KW_BIGINT943_tree=null;
		ASTNode KW_BOOLEAN944_tree=null;
		ASTNode KW_FLOAT945_tree=null;
		ASTNode KW_DOUBLE946_tree=null;
		ASTNode KW_PRECISION947_tree=null;
		ASTNode KW_DATE948_tree=null;
		ASTNode KW_DATETIME949_tree=null;
		ASTNode KW_TIMESTAMP950_tree=null;
		ASTNode KW_TIMESTAMPLOCALTZ951_tree=null;
		ASTNode KW_TIMESTAMP952_tree=null;
		ASTNode KW_WITH953_tree=null;
		ASTNode KW_LOCAL954_tree=null;
		ASTNode KW_TIME955_tree=null;
		ASTNode KW_ZONE956_tree=null;
		ASTNode KW_STRING957_tree=null;
		ASTNode KW_BINARY958_tree=null;
		ASTNode KW_DECIMAL959_tree=null;
		ASTNode LPAREN960_tree=null;
		ASTNode COMMA961_tree=null;
		ASTNode RPAREN962_tree=null;
		ASTNode KW_VARCHAR963_tree=null;
		ASTNode LPAREN964_tree=null;
		ASTNode RPAREN965_tree=null;
		ASTNode KW_CHAR966_tree=null;
		ASTNode LPAREN967_tree=null;
		ASTNode RPAREN968_tree=null;
		RewriteRuleTokenStream stream_KW_DATETIME=new RewriteRuleTokenStream(adaptor,"token KW_DATETIME");
		RewriteRuleTokenStream stream_KW_TIMESTAMP=new RewriteRuleTokenStream(adaptor,"token KW_TIMESTAMP");
		RewriteRuleTokenStream stream_KW_BOOLEAN=new RewriteRuleTokenStream(adaptor,"token KW_BOOLEAN");
		RewriteRuleTokenStream stream_KW_DOUBLE=new RewriteRuleTokenStream(adaptor,"token KW_DOUBLE");
		RewriteRuleTokenStream stream_KW_TIME=new RewriteRuleTokenStream(adaptor,"token KW_TIME");
		RewriteRuleTokenStream stream_KW_CHAR=new RewriteRuleTokenStream(adaptor,"token KW_CHAR");
		RewriteRuleTokenStream stream_KW_INT=new RewriteRuleTokenStream(adaptor,"token KW_INT");
		RewriteRuleTokenStream stream_KW_DECIMAL=new RewriteRuleTokenStream(adaptor,"token KW_DECIMAL");
		RewriteRuleTokenStream stream_KW_ZONE=new RewriteRuleTokenStream(adaptor,"token KW_ZONE");
		RewriteRuleTokenStream stream_Number=new RewriteRuleTokenStream(adaptor,"token Number");
		RewriteRuleTokenStream stream_KW_TINYINT=new RewriteRuleTokenStream(adaptor,"token KW_TINYINT");
		RewriteRuleTokenStream stream_KW_PRECISION=new RewriteRuleTokenStream(adaptor,"token KW_PRECISION");
		RewriteRuleTokenStream stream_KW_LOCAL=new RewriteRuleTokenStream(adaptor,"token KW_LOCAL");
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleTokenStream stream_KW_SMALLINT=new RewriteRuleTokenStream(adaptor,"token KW_SMALLINT");
		RewriteRuleTokenStream stream_KW_DATE=new RewriteRuleTokenStream(adaptor,"token KW_DATE");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_KW_BIGINT=new RewriteRuleTokenStream(adaptor,"token KW_BIGINT");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleTokenStream stream_KW_STRING=new RewriteRuleTokenStream(adaptor,"token KW_STRING");
		RewriteRuleTokenStream stream_KW_VARCHAR=new RewriteRuleTokenStream(adaptor,"token KW_VARCHAR");
		RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
		RewriteRuleTokenStream stream_KW_FLOAT=new RewriteRuleTokenStream(adaptor,"token KW_FLOAT");
		RewriteRuleTokenStream stream_KW_TIMESTAMPLOCALTZ=new RewriteRuleTokenStream(adaptor,"token KW_TIMESTAMPLOCALTZ");
		RewriteRuleTokenStream stream_KW_BINARY=new RewriteRuleTokenStream(adaptor,"token KW_BINARY");

		 pushMsg("primitive type specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2517:5: ( KW_TINYINT -> TOK_TINYINT | KW_SMALLINT -> TOK_SMALLINT | KW_INT -> TOK_INT | KW_BIGINT -> TOK_BIGINT | KW_BOOLEAN -> TOK_BOOLEAN | KW_FLOAT -> TOK_FLOAT | KW_DOUBLE ( KW_PRECISION )? -> TOK_DOUBLE | KW_DATE -> TOK_DATE | KW_DATETIME -> TOK_DATETIME | KW_TIMESTAMP -> TOK_TIMESTAMP | KW_TIMESTAMPLOCALTZ -> TOK_TIMESTAMPLOCALTZ | KW_TIMESTAMP KW_WITH KW_LOCAL KW_TIME KW_ZONE -> TOK_TIMESTAMPLOCALTZ | KW_STRING -> TOK_STRING | KW_BINARY -> TOK_BINARY | KW_DECIMAL ( LPAREN prec= Number ( COMMA scale= Number )? RPAREN )? -> ^( TOK_DECIMAL ( $prec)? ( $scale)? ) | KW_VARCHAR LPAREN length= Number RPAREN -> ^( TOK_VARCHAR $length) | KW_CHAR LPAREN length= Number RPAREN -> ^( TOK_CHAR $length) )
			int alt293=17;
			switch ( input.LA(1) ) {
			case KW_TINYINT:
				{
				alt293=1;
				}
				break;
			case KW_SMALLINT:
				{
				alt293=2;
				}
				break;
			case KW_INT:
				{
				alt293=3;
				}
				break;
			case KW_BIGINT:
				{
				alt293=4;
				}
				break;
			case KW_BOOLEAN:
				{
				alt293=5;
				}
				break;
			case KW_FLOAT:
				{
				alt293=6;
				}
				break;
			case KW_DOUBLE:
				{
				alt293=7;
				}
				break;
			case KW_DATE:
				{
				alt293=8;
				}
				break;
			case KW_DATETIME:
				{
				alt293=9;
				}
				break;
			case KW_TIMESTAMP:
				{
				int LA293_10 = input.LA(2);
				if ( (LA293_10==KW_WITH) ) {
					alt293=12;
				}
				else if ( (LA293_10==EOF||LA293_10==COMMA||LA293_10==GREATERTHAN||LA293_10==KW_AFTER||LA293_10==KW_CASCADE||(LA293_10 >= KW_CHECK && LA293_10 <= KW_CLUSTER)||LA293_10==KW_COMMENT||LA293_10==KW_CONSTRAINT||LA293_10==KW_DEFAULT||LA293_10==KW_DISTRIBUTE||LA293_10==KW_EXCEPT||LA293_10==KW_FIRST||LA293_10==KW_FROM||LA293_10==KW_GROUP||LA293_10==KW_HAVING||LA293_10==KW_INSERT||LA293_10==KW_INTERSECT||LA293_10==KW_LATERAL||LA293_10==KW_LIMIT||LA293_10==KW_MAP||LA293_10==KW_MINUS||LA293_10==KW_NOT||LA293_10==KW_ORDER||LA293_10==KW_PRIMARY||LA293_10==KW_RECORDREADER||(LA293_10 >= KW_REDUCE && LA293_10 <= KW_REFERENCES)||LA293_10==KW_RESTRICT||LA293_10==KW_ROW||LA293_10==KW_SELECT||LA293_10==KW_SORT||LA293_10==KW_UNION||LA293_10==KW_UNIQUE||LA293_10==KW_WHERE||LA293_10==KW_WINDOW||LA293_10==RPAREN) ) {
					alt293=10;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 293, 10, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
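				/*
				 * Illustrative note (hand-written, not ANTLR output): the case
				 * above needs a second lookahead token because KW_TIMESTAMP is
				 * ambiguous between plain TIMESTAMP (alt 10) and TIMESTAMP WITH
				 * LOCAL TIME ZONE (alt 12). KW_WITH selects alt 12, any token
				 * that can legally follow a completed type selects alt 10, and
				 * anything else raises NoViableAltException after rewinding the
				 * input to the marked position.
				 */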
			case KW_TIMESTAMPLOCALTZ:
				{
				alt293=11;
				}
				break;
			case KW_STRING:
				{
				alt293=13;
				}
				break;
			case KW_BINARY:
				{
				alt293=14;
				}
				break;
			case KW_DECIMAL:
				{
				alt293=15;
				}
				break;
			case KW_VARCHAR:
				{
				alt293=16;
				}
				break;
			case KW_CHAR:
				{
				alt293=17;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 293, 0, input);
				throw nvae;
			}
			switch (alt293) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2517:7: KW_TINYINT
					{
					KW_TINYINT940=(Token)match(input,KW_TINYINT,FOLLOW_KW_TINYINT_in_primitiveType15938); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TINYINT.add(KW_TINYINT940);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2517:24: -> TOK_TINYINT
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_TINYINT, "TOK_TINYINT"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2518:7: KW_SMALLINT
					{
					KW_SMALLINT941=(Token)match(input,KW_SMALLINT,FOLLOW_KW_SMALLINT_in_primitiveType15959); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_SMALLINT.add(KW_SMALLINT941);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2518:24: -> TOK_SMALLINT
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_SMALLINT, "TOK_SMALLINT"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2519:7: KW_INT
					{
					KW_INT942=(Token)match(input,KW_INT,FOLLOW_KW_INT_in_primitiveType15979); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_INT.add(KW_INT942);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2519:24: -> TOK_INT
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_INT, "TOK_INT"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2520:7: KW_BIGINT
					{
					KW_BIGINT943=(Token)match(input,KW_BIGINT,FOLLOW_KW_BIGINT_in_primitiveType16004); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_BIGINT.add(KW_BIGINT943);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2520:24: -> TOK_BIGINT
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_BIGINT, "TOK_BIGINT"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 5 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2521:7: KW_BOOLEAN
					{
					KW_BOOLEAN944=(Token)match(input,KW_BOOLEAN,FOLLOW_KW_BOOLEAN_in_primitiveType16026); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_BOOLEAN.add(KW_BOOLEAN944);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2521:24: -> TOK_BOOLEAN
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_BOOLEAN, "TOK_BOOLEAN"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 6 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2522:7: KW_FLOAT
					{
					KW_FLOAT945=(Token)match(input,KW_FLOAT,FOLLOW_KW_FLOAT_in_primitiveType16047); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_FLOAT.add(KW_FLOAT945);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2522:24: -> TOK_FLOAT
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_FLOAT, "TOK_FLOAT"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 7 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2523:7: KW_DOUBLE ( KW_PRECISION )?
					{
					KW_DOUBLE946=(Token)match(input,KW_DOUBLE,FOLLOW_KW_DOUBLE_in_primitiveType16070); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DOUBLE.add(KW_DOUBLE946);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2523:17: ( KW_PRECISION )?
					int alt290=2;
					int LA290_0 = input.LA(1);
					if ( (LA290_0==KW_PRECISION) ) {
						alt290=1;
					}
					switch (alt290) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2523:17: KW_PRECISION
							{
							KW_PRECISION947=(Token)match(input,KW_PRECISION,FOLLOW_KW_PRECISION_in_primitiveType16072); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_PRECISION.add(KW_PRECISION947);

							}
							break;

					}

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2523:37: -> TOK_DOUBLE
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_DOUBLE, "TOK_DOUBLE"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 8 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2524:7: KW_DATE
					{
					KW_DATE948=(Token)match(input,KW_DATE,FOLLOW_KW_DATE_in_primitiveType16094); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DATE.add(KW_DATE948);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2524:24: -> TOK_DATE
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_DATE, "TOK_DATE"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 9 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2525:7: KW_DATETIME
					{
					KW_DATETIME949=(Token)match(input,KW_DATETIME,FOLLOW_KW_DATETIME_in_primitiveType16118); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DATETIME.add(KW_DATETIME949);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2525:24: -> TOK_DATETIME
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_DATETIME, "TOK_DATETIME"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 10 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2526:7: KW_TIMESTAMP
					{
					KW_TIMESTAMP950=(Token)match(input,KW_TIMESTAMP,FOLLOW_KW_TIMESTAMP_in_primitiveType16138); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TIMESTAMP.add(KW_TIMESTAMP950);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2526:24: -> TOK_TIMESTAMP
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_TIMESTAMP, "TOK_TIMESTAMP"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 11 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2527:7: KW_TIMESTAMPLOCALTZ
					{
					KW_TIMESTAMPLOCALTZ951=(Token)match(input,KW_TIMESTAMPLOCALTZ,FOLLOW_KW_TIMESTAMPLOCALTZ_in_primitiveType16157); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TIMESTAMPLOCALTZ.add(KW_TIMESTAMPLOCALTZ951);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2527:29: -> TOK_TIMESTAMPLOCALTZ
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_TIMESTAMPLOCALTZ, "TOK_TIMESTAMPLOCALTZ"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 12 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2529:7: KW_TIMESTAMP KW_WITH KW_LOCAL KW_TIME KW_ZONE
					{
					KW_TIMESTAMP952=(Token)match(input,KW_TIMESTAMP,FOLLOW_KW_TIMESTAMP_in_primitiveType16179); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TIMESTAMP.add(KW_TIMESTAMP952);

					KW_WITH953=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_primitiveType16181); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_WITH.add(KW_WITH953);

					KW_LOCAL954=(Token)match(input,KW_LOCAL,FOLLOW_KW_LOCAL_in_primitiveType16183); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_LOCAL.add(KW_LOCAL954);

					KW_TIME955=(Token)match(input,KW_TIME,FOLLOW_KW_TIME_in_primitiveType16185); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TIME.add(KW_TIME955);

					KW_ZONE956=(Token)match(input,KW_ZONE,FOLLOW_KW_ZONE_in_primitiveType16187); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ZONE.add(KW_ZONE956);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2529:53: -> TOK_TIMESTAMPLOCALTZ
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_TIMESTAMPLOCALTZ, "TOK_TIMESTAMPLOCALTZ"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 13 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2534:7: KW_STRING
					{
					KW_STRING957=(Token)match(input,KW_STRING,FOLLOW_KW_STRING_in_primitiveType16219); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_STRING.add(KW_STRING957);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2534:24: -> TOK_STRING
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_STRING, "TOK_STRING"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 14 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2535:7: KW_BINARY
					{
					KW_BINARY958=(Token)match(input,KW_BINARY,FOLLOW_KW_BINARY_in_primitiveType16241); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_BINARY.add(KW_BINARY958);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2535:24: -> TOK_BINARY
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_BINARY, "TOK_BINARY"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 15 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2536:7: KW_DECIMAL ( LPAREN prec= Number ( COMMA scale= Number )? RPAREN )?
					{
					KW_DECIMAL959=(Token)match(input,KW_DECIMAL,FOLLOW_KW_DECIMAL_in_primitiveType16263); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DECIMAL.add(KW_DECIMAL959);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2536:18: ( LPAREN prec= Number ( COMMA scale= Number )? RPAREN )?
					int alt292=2;
					int LA292_0 = input.LA(1);
					if ( (LA292_0==LPAREN) ) {
						alt292=1;
					}
					switch (alt292) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2536:19: LPAREN prec= Number ( COMMA scale= Number )? RPAREN
							{
							LPAREN960=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_primitiveType16266); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN960);

							prec=(Token)match(input,Number,FOLLOW_Number_in_primitiveType16270); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_Number.add(prec);

							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2536:38: ( COMMA scale= Number )?
							int alt291=2;
							int LA291_0 = input.LA(1);
							if ( (LA291_0==COMMA) ) {
								alt291=1;
							}
							switch (alt291) {
								case 1 :
									// org/apache/hadoop/hive/ql/parse/HiveParser.g:2536:39: COMMA scale= Number
									{
									COMMA961=(Token)match(input,COMMA,FOLLOW_COMMA_in_primitiveType16273); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_COMMA.add(COMMA961);

									scale=(Token)match(input,Number,FOLLOW_Number_in_primitiveType16277); if (state.failed) return retval; 
									if ( state.backtracking==0 ) stream_Number.add(scale);

									}
									break;

							}

							RPAREN962=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_primitiveType16281); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN962);

							}
							break;

					}

					// AST REWRITE
					// elements: scale, prec
					// token labels: prec, scale
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleTokenStream stream_prec=new RewriteRuleTokenStream(adaptor,"token prec",prec);
					RewriteRuleTokenStream stream_scale=new RewriteRuleTokenStream(adaptor,"token scale",scale);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2536:69: -> ^( TOK_DECIMAL ( $prec)? ( $scale)? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2536:72: ^( TOK_DECIMAL ( $prec)? ( $scale)? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DECIMAL, "TOK_DECIMAL"), root_1);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2536:87: ( $prec)?
						if ( stream_prec.hasNext() ) {
							adaptor.addChild(root_1, stream_prec.nextNode());
						}
						stream_prec.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2536:94: ( $scale)?
						if ( stream_scale.hasNext() ) {
							adaptor.addChild(root_1, stream_scale.nextNode());
						}
						stream_scale.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 16 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2537:7: KW_VARCHAR LPAREN length= Number RPAREN
					{
					KW_VARCHAR963=(Token)match(input,KW_VARCHAR,FOLLOW_KW_VARCHAR_in_primitiveType16305); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_VARCHAR.add(KW_VARCHAR963);

					LPAREN964=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_primitiveType16307); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN964);

					length=(Token)match(input,Number,FOLLOW_Number_in_primitiveType16311); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_Number.add(length);

					RPAREN965=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_primitiveType16313); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN965);

					// AST REWRITE
					// elements: length
					// token labels: length
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleTokenStream stream_length=new RewriteRuleTokenStream(adaptor,"token length",length);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2537:51: -> ^( TOK_VARCHAR $length)
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2537:57: ^( TOK_VARCHAR $length)
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_VARCHAR, "TOK_VARCHAR"), root_1);
						adaptor.addChild(root_1, stream_length.nextNode());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 17 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2538:7: KW_CHAR LPAREN length= Number RPAREN
					{
					KW_CHAR966=(Token)match(input,KW_CHAR,FOLLOW_KW_CHAR_in_primitiveType16338); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_CHAR.add(KW_CHAR966);

					LPAREN967=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_primitiveType16340); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN967);

					length=(Token)match(input,Number,FOLLOW_Number_in_primitiveType16344); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_Number.add(length);

					RPAREN968=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_primitiveType16346); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN968);

					// AST REWRITE
					// elements: length
					// token labels: length
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleTokenStream stream_length=new RewriteRuleTokenStream(adaptor,"token length",length);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2538:48: -> ^( TOK_CHAR $length)
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2538:54: ^( TOK_CHAR $length)
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CHAR, "TOK_CHAR"), root_1);
						adaptor.addChild(root_1, stream_length.nextNode());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "primitiveType"
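
	/*
	 * Illustrative note (hand-written, not ANTLR output): most alternatives
	 * above rewrite a bare keyword to a bare token (e.g. KW_INT -> TOK_INT),
	 * and the spelled-out "timestamp with local time zone" collapses to the
	 * same TOK_TIMESTAMPLOCALTZ node as the single-keyword form. The
	 * parameterized types keep their Number arguments as children, so,
	 * hypothetically:
	 *
	 *     decimal(10,2) -> ^(TOK_DECIMAL 10 2)    decimal -> ^(TOK_DECIMAL)
	 *     varchar(255)  -> ^(TOK_VARCHAR 255)     char(8) -> ^(TOK_CHAR 8)
	 *
	 * with precision and scale optional for DECIMAL but the length mandatory
	 * for VARCHAR and CHAR.
	 */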


	public static class listType_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "listType"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2541:1: listType : KW_ARRAY LESSTHAN type GREATERTHAN -> ^( TOK_LIST type ) ;
	public final HiveParser.listType_return listType() throws RecognitionException {
		HiveParser.listType_return retval = new HiveParser.listType_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ARRAY969=null;
		Token LESSTHAN970=null;
		Token GREATERTHAN972=null;
		ParserRuleReturnScope type971 =null;

		ASTNode KW_ARRAY969_tree=null;
		ASTNode LESSTHAN970_tree=null;
		ASTNode GREATERTHAN972_tree=null;
		RewriteRuleTokenStream stream_LESSTHAN=new RewriteRuleTokenStream(adaptor,"token LESSTHAN");
		RewriteRuleTokenStream stream_KW_ARRAY=new RewriteRuleTokenStream(adaptor,"token KW_ARRAY");
		RewriteRuleTokenStream stream_GREATERTHAN=new RewriteRuleTokenStream(adaptor,"token GREATERTHAN");
		RewriteRuleSubtreeStream stream_type=new RewriteRuleSubtreeStream(adaptor,"rule type");

		 pushMsg("list type", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2544:5: ( KW_ARRAY LESSTHAN type GREATERTHAN -> ^( TOK_LIST type ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2544:7: KW_ARRAY LESSTHAN type GREATERTHAN
			{
			KW_ARRAY969=(Token)match(input,KW_ARRAY,FOLLOW_KW_ARRAY_in_listType16390); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ARRAY.add(KW_ARRAY969);

			LESSTHAN970=(Token)match(input,LESSTHAN,FOLLOW_LESSTHAN_in_listType16392); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LESSTHAN.add(LESSTHAN970);

			pushFollow(FOLLOW_type_in_listType16394);
			type971=type();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_type.add(type971.getTree());
			GREATERTHAN972=(Token)match(input,GREATERTHAN,FOLLOW_GREATERTHAN_in_listType16396); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_GREATERTHAN.add(GREATERTHAN972);

			// AST REWRITE
			// elements: type
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2544:44: -> ^( TOK_LIST type )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2544:47: ^( TOK_LIST type )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_LIST, "TOK_LIST"), root_1);
				adaptor.addChild(root_1, stream_type.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "listType"
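
	/*
	 * Illustrative note (hand-written, not ANTLR output): "array<T>" is
	 * rewritten to ^(TOK_LIST T); the ARRAY keyword and the angle brackets
	 * are consumed, leaving only the element type in the tree. E.g. the
	 * hypothetical input "array<int>" yields ^(TOK_LIST TOK_INT).
	 */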


	public static class structType_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "structType"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2547:1: structType : KW_STRUCT LESSTHAN columnNameColonTypeList GREATERTHAN -> ^( TOK_STRUCT columnNameColonTypeList ) ;
	public final HiveParser.structType_return structType() throws RecognitionException {
		HiveParser.structType_return retval = new HiveParser.structType_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_STRUCT973=null;
		Token LESSTHAN974=null;
		Token GREATERTHAN976=null;
		ParserRuleReturnScope columnNameColonTypeList975 =null;

		ASTNode KW_STRUCT973_tree=null;
		ASTNode LESSTHAN974_tree=null;
		ASTNode GREATERTHAN976_tree=null;
		RewriteRuleTokenStream stream_KW_STRUCT=new RewriteRuleTokenStream(adaptor,"token KW_STRUCT");
		RewriteRuleTokenStream stream_LESSTHAN=new RewriteRuleTokenStream(adaptor,"token LESSTHAN");
		RewriteRuleTokenStream stream_GREATERTHAN=new RewriteRuleTokenStream(adaptor,"token GREATERTHAN");
		RewriteRuleSubtreeStream stream_columnNameColonTypeList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameColonTypeList");

		 pushMsg("struct type", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2550:5: ( KW_STRUCT LESSTHAN columnNameColonTypeList GREATERTHAN -> ^( TOK_STRUCT columnNameColonTypeList ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2550:7: KW_STRUCT LESSTHAN columnNameColonTypeList GREATERTHAN
			{
			KW_STRUCT973=(Token)match(input,KW_STRUCT,FOLLOW_KW_STRUCT_in_structType16433); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_STRUCT.add(KW_STRUCT973);

			LESSTHAN974=(Token)match(input,LESSTHAN,FOLLOW_LESSTHAN_in_structType16435); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LESSTHAN.add(LESSTHAN974);

			pushFollow(FOLLOW_columnNameColonTypeList_in_structType16437);
			columnNameColonTypeList975=columnNameColonTypeList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnNameColonTypeList.add(columnNameColonTypeList975.getTree());
			GREATERTHAN976=(Token)match(input,GREATERTHAN,FOLLOW_GREATERTHAN_in_structType16439); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_GREATERTHAN.add(GREATERTHAN976);

			// AST REWRITE
			// elements: columnNameColonTypeList
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2550:62: -> ^( TOK_STRUCT columnNameColonTypeList )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2550:65: ^( TOK_STRUCT columnNameColonTypeList )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_STRUCT, "TOK_STRUCT"), root_1);
				adaptor.addChild(root_1, stream_columnNameColonTypeList.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "structType"


	public static class mapType_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "mapType"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2553:1: mapType : KW_MAP LESSTHAN left= primitiveType COMMA right= type GREATERTHAN -> ^( TOK_MAP $left $right) ;
	public final HiveParser.mapType_return mapType() throws RecognitionException {
		HiveParser.mapType_return retval = new HiveParser.mapType_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_MAP977=null;
		Token LESSTHAN978=null;
		Token COMMA979=null;
		Token GREATERTHAN980=null;
		ParserRuleReturnScope left =null;
		ParserRuleReturnScope right =null;

		ASTNode KW_MAP977_tree=null;
		ASTNode LESSTHAN978_tree=null;
		ASTNode COMMA979_tree=null;
		ASTNode GREATERTHAN980_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleTokenStream stream_KW_MAP=new RewriteRuleTokenStream(adaptor,"token KW_MAP");
		RewriteRuleTokenStream stream_LESSTHAN=new RewriteRuleTokenStream(adaptor,"token LESSTHAN");
		RewriteRuleTokenStream stream_GREATERTHAN=new RewriteRuleTokenStream(adaptor,"token GREATERTHAN");
		RewriteRuleSubtreeStream stream_type=new RewriteRuleSubtreeStream(adaptor,"rule type");
		RewriteRuleSubtreeStream stream_primitiveType=new RewriteRuleSubtreeStream(adaptor,"rule primitiveType");

		 pushMsg("map type", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2556:5: ( KW_MAP LESSTHAN left= primitiveType COMMA right= type GREATERTHAN -> ^( TOK_MAP $left $right) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2556:7: KW_MAP LESSTHAN left= primitiveType COMMA right= type GREATERTHAN
			{
			KW_MAP977=(Token)match(input,KW_MAP,FOLLOW_KW_MAP_in_mapType16474); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_MAP.add(KW_MAP977);

			LESSTHAN978=(Token)match(input,LESSTHAN,FOLLOW_LESSTHAN_in_mapType16476); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LESSTHAN.add(LESSTHAN978);

			pushFollow(FOLLOW_primitiveType_in_mapType16480);
			left=primitiveType();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_primitiveType.add(left.getTree());
			COMMA979=(Token)match(input,COMMA,FOLLOW_COMMA_in_mapType16482); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_COMMA.add(COMMA979);

			pushFollow(FOLLOW_type_in_mapType16486);
			right=type();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_type.add(right.getTree());
			GREATERTHAN980=(Token)match(input,GREATERTHAN,FOLLOW_GREATERTHAN_in_mapType16488); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_GREATERTHAN.add(GREATERTHAN980);

			// AST REWRITE
			// elements: right, left
			// token labels: 
			// rule labels: left, right, retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_left=new RewriteRuleSubtreeStream(adaptor,"rule left",left!=null?left.getTree():null);
			RewriteRuleSubtreeStream stream_right=new RewriteRuleSubtreeStream(adaptor,"rule right",right!=null?right.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2557:5: -> ^( TOK_MAP $left $right)
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2557:8: ^( TOK_MAP $left $right)
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_MAP, "TOK_MAP"), root_1);
				adaptor.addChild(root_1, stream_left.nextTree());
				adaptor.addChild(root_1, stream_right.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "mapType"
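
	/*
	 * Illustrative note (hand-written, not ANTLR output): the grammar
	 * restricts map keys to primitive types (left=primitiveType) while values
	 * may be any type (right=type). The hypothetical input "map<string,int>"
	 * therefore becomes ^(TOK_MAP TOK_STRING TOK_INT), whereas a complex key
	 * type is rejected at parse time.
	 */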


	public static class unionType_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "unionType"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2560:1: unionType : KW_UNIONTYPE LESSTHAN colTypeList GREATERTHAN -> ^( TOK_UNIONTYPE colTypeList ) ;
	public final HiveParser.unionType_return unionType() throws RecognitionException {
		HiveParser.unionType_return retval = new HiveParser.unionType_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_UNIONTYPE981=null;
		Token LESSTHAN982=null;
		Token GREATERTHAN984=null;
		ParserRuleReturnScope colTypeList983 =null;

		ASTNode KW_UNIONTYPE981_tree=null;
		ASTNode LESSTHAN982_tree=null;
		ASTNode GREATERTHAN984_tree=null;
		RewriteRuleTokenStream stream_KW_UNIONTYPE=new RewriteRuleTokenStream(adaptor,"token KW_UNIONTYPE");
		RewriteRuleTokenStream stream_LESSTHAN=new RewriteRuleTokenStream(adaptor,"token LESSTHAN");
		RewriteRuleTokenStream stream_GREATERTHAN=new RewriteRuleTokenStream(adaptor,"token GREATERTHAN");
		RewriteRuleSubtreeStream stream_colTypeList=new RewriteRuleSubtreeStream(adaptor,"rule colTypeList");

		 pushMsg("uniontype type", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2563:5: ( KW_UNIONTYPE LESSTHAN colTypeList GREATERTHAN -> ^( TOK_UNIONTYPE colTypeList ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2563:7: KW_UNIONTYPE LESSTHAN colTypeList GREATERTHAN
			{
			KW_UNIONTYPE981=(Token)match(input,KW_UNIONTYPE,FOLLOW_KW_UNIONTYPE_in_unionType16531); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_UNIONTYPE.add(KW_UNIONTYPE981);

			LESSTHAN982=(Token)match(input,LESSTHAN,FOLLOW_LESSTHAN_in_unionType16533); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LESSTHAN.add(LESSTHAN982);

			pushFollow(FOLLOW_colTypeList_in_unionType16535);
			colTypeList983=colTypeList();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_colTypeList.add(colTypeList983.getTree());
			GREATERTHAN984=(Token)match(input,GREATERTHAN,FOLLOW_GREATERTHAN_in_unionType16537); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_GREATERTHAN.add(GREATERTHAN984);

			// AST REWRITE
			// elements: colTypeList
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2563:53: -> ^( TOK_UNIONTYPE colTypeList )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2563:56: ^( TOK_UNIONTYPE colTypeList )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_UNIONTYPE, "TOK_UNIONTYPE"), root_1);
				adaptor.addChild(root_1, stream_colTypeList.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "unionType"
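
	/*
	 * Illustrative note (hand-written, not ANTLR output): "uniontype<...>"
	 * reuses colTypeList for its members, so the hypothetical input
	 * "uniontype<int,string>" becomes
	 * ^(TOK_UNIONTYPE ^(TOK_COLTYPELIST TOK_INT TOK_STRING)).
	 */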


	public static class setOperator_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "setOperator"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2566:1: setOperator : ( KW_UNION KW_ALL -> ^( TOK_UNIONALL ) | KW_UNION ( KW_DISTINCT )? -> ^( TOK_UNIONDISTINCT ) | KW_INTERSECT KW_ALL -> ^( TOK_INTERSECTALL ) | KW_INTERSECT ( KW_DISTINCT )? -> ^( TOK_INTERSECTDISTINCT ) | KW_EXCEPT KW_ALL -> ^( TOK_EXCEPTALL ) | KW_EXCEPT ( KW_DISTINCT )? -> ^( TOK_EXCEPTDISTINCT ) | KW_MINUS KW_ALL -> ^( TOK_EXCEPTALL ) | KW_MINUS ( KW_DISTINCT )? -> ^( TOK_EXCEPTDISTINCT ) );
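	/*
	 * Illustrative note (hand-written, not ANTLR output): each of UNION,
	 * INTERSECT, EXCEPT and MINUS takes an optional ALL or DISTINCT modifier,
	 * and an omitted modifier defaults to the DISTINCT form. Note that MINUS
	 * is a synonym for EXCEPT in this grammar: both rewrite to TOK_EXCEPTALL
	 * or TOK_EXCEPTDISTINCT rather than to a dedicated MINUS token.
	 */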
	public final HiveParser.setOperator_return setOperator() throws RecognitionException {
		HiveParser.setOperator_return retval = new HiveParser.setOperator_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_UNION985=null;
		Token KW_ALL986=null;
		Token KW_UNION987=null;
		Token KW_DISTINCT988=null;
		Token KW_INTERSECT989=null;
		Token KW_ALL990=null;
		Token KW_INTERSECT991=null;
		Token KW_DISTINCT992=null;
		Token KW_EXCEPT993=null;
		Token KW_ALL994=null;
		Token KW_EXCEPT995=null;
		Token KW_DISTINCT996=null;
		Token KW_MINUS997=null;
		Token KW_ALL998=null;
		Token KW_MINUS999=null;
		Token KW_DISTINCT1000=null;

		ASTNode KW_UNION985_tree=null;
		ASTNode KW_ALL986_tree=null;
		ASTNode KW_UNION987_tree=null;
		ASTNode KW_DISTINCT988_tree=null;
		ASTNode KW_INTERSECT989_tree=null;
		ASTNode KW_ALL990_tree=null;
		ASTNode KW_INTERSECT991_tree=null;
		ASTNode KW_DISTINCT992_tree=null;
		ASTNode KW_EXCEPT993_tree=null;
		ASTNode KW_ALL994_tree=null;
		ASTNode KW_EXCEPT995_tree=null;
		ASTNode KW_DISTINCT996_tree=null;
		ASTNode KW_MINUS997_tree=null;
		ASTNode KW_ALL998_tree=null;
		ASTNode KW_MINUS999_tree=null;
		ASTNode KW_DISTINCT1000_tree=null;
		RewriteRuleTokenStream stream_KW_INTERSECT=new RewriteRuleTokenStream(adaptor,"token KW_INTERSECT");
		RewriteRuleTokenStream stream_KW_EXCEPT=new RewriteRuleTokenStream(adaptor,"token KW_EXCEPT");
		RewriteRuleTokenStream stream_KW_UNION=new RewriteRuleTokenStream(adaptor,"token KW_UNION");
		RewriteRuleTokenStream stream_KW_DISTINCT=new RewriteRuleTokenStream(adaptor,"token KW_DISTINCT");
		RewriteRuleTokenStream stream_KW_ALL=new RewriteRuleTokenStream(adaptor,"token KW_ALL");
		RewriteRuleTokenStream stream_KW_MINUS=new RewriteRuleTokenStream(adaptor,"token KW_MINUS");

		 pushMsg("set operator", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2569:5: ( KW_UNION KW_ALL -> ^( TOK_UNIONALL ) | KW_UNION ( KW_DISTINCT )? -> ^( TOK_UNIONDISTINCT ) | KW_INTERSECT KW_ALL -> ^( TOK_INTERSECTALL ) | KW_INTERSECT ( KW_DISTINCT )? -> ^( TOK_INTERSECTDISTINCT ) | KW_EXCEPT KW_ALL -> ^( TOK_EXCEPTALL ) | KW_EXCEPT ( KW_DISTINCT )? -> ^( TOK_EXCEPTDISTINCT ) | KW_MINUS KW_ALL -> ^( TOK_EXCEPTALL ) | KW_MINUS ( KW_DISTINCT )? -> ^( TOK_EXCEPTDISTINCT ) )
			int alt298=8;
			switch ( input.LA(1) ) {
			case KW_UNION:
				{
				int LA298_1 = input.LA(2);
				if ( (LA298_1==KW_ALL) ) {
					alt298=1;
				}
				else if ( (LA298_1==KW_DISTINCT||LA298_1==KW_FROM||LA298_1==KW_MAP||LA298_1==KW_REDUCE||LA298_1==KW_SELECT||LA298_1==LPAREN) ) {
					alt298=2;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 298, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_INTERSECT:
				{
				int LA298_2 = input.LA(2);
				if ( (LA298_2==KW_ALL) ) {
					alt298=3;
				}
				else if ( (LA298_2==KW_DISTINCT||LA298_2==KW_FROM||LA298_2==KW_MAP||LA298_2==KW_REDUCE||LA298_2==KW_SELECT||LA298_2==LPAREN) ) {
					alt298=4;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 298, 2, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_EXCEPT:
				{
				int LA298_3 = input.LA(2);
				if ( (LA298_3==KW_ALL) ) {
					alt298=5;
				}
				else if ( (LA298_3==KW_DISTINCT||LA298_3==KW_FROM||LA298_3==KW_MAP||LA298_3==KW_REDUCE||LA298_3==KW_SELECT||LA298_3==LPAREN) ) {
					alt298=6;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 298, 3, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			case KW_MINUS:
				{
				int LA298_4 = input.LA(2);
				if ( (LA298_4==KW_ALL) ) {
					alt298=7;
				}
				else if ( (LA298_4==KW_DISTINCT||LA298_4==KW_FROM||LA298_4==KW_MAP||LA298_4==KW_REDUCE||LA298_4==KW_SELECT||LA298_4==LPAREN) ) {
					alt298=8;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 298, 4, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 298, 0, input);
				throw nvae;
			}
			switch (alt298) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2569:7: KW_UNION KW_ALL
					{
					KW_UNION985=(Token)match(input,KW_UNION,FOLLOW_KW_UNION_in_setOperator16572); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_UNION.add(KW_UNION985);

					KW_ALL986=(Token)match(input,KW_ALL,FOLLOW_KW_ALL_in_setOperator16574); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ALL.add(KW_ALL986);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2569:23: -> ^( TOK_UNIONALL )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2569:26: ^( TOK_UNIONALL )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_UNIONALL, "TOK_UNIONALL"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2570:7: KW_UNION ( KW_DISTINCT )?
					{
					KW_UNION987=(Token)match(input,KW_UNION,FOLLOW_KW_UNION_in_setOperator16588); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_UNION.add(KW_UNION987);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2570:16: ( KW_DISTINCT )?
					int alt294=2;
					int LA294_0 = input.LA(1);
					if ( (LA294_0==KW_DISTINCT) ) {
						alt294=1;
					}
					switch (alt294) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2570:16: KW_DISTINCT
							{
							KW_DISTINCT988=(Token)match(input,KW_DISTINCT,FOLLOW_KW_DISTINCT_in_setOperator16590); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_DISTINCT.add(KW_DISTINCT988);

							}
							break;

					}

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2570:29: -> ^( TOK_UNIONDISTINCT )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2570:32: ^( TOK_UNIONDISTINCT )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_UNIONDISTINCT, "TOK_UNIONDISTINCT"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2571:7: KW_INTERSECT KW_ALL
					{
					KW_INTERSECT989=(Token)match(input,KW_INTERSECT,FOLLOW_KW_INTERSECT_in_setOperator16605); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_INTERSECT.add(KW_INTERSECT989);

					KW_ALL990=(Token)match(input,KW_ALL,FOLLOW_KW_ALL_in_setOperator16607); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ALL.add(KW_ALL990);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2571:27: -> ^( TOK_INTERSECTALL )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2571:30: ^( TOK_INTERSECTALL )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_INTERSECTALL, "TOK_INTERSECTALL"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2572:7: KW_INTERSECT ( KW_DISTINCT )?
					{
					KW_INTERSECT991=(Token)match(input,KW_INTERSECT,FOLLOW_KW_INTERSECT_in_setOperator16621); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_INTERSECT.add(KW_INTERSECT991);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2572:20: ( KW_DISTINCT )?
					int alt295=2;
					int LA295_0 = input.LA(1);
					if ( (LA295_0==KW_DISTINCT) ) {
						alt295=1;
					}
					switch (alt295) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2572:20: KW_DISTINCT
							{
							KW_DISTINCT992=(Token)match(input,KW_DISTINCT,FOLLOW_KW_DISTINCT_in_setOperator16623); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_DISTINCT.add(KW_DISTINCT992);

							}
							break;

					}

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2572:33: -> ^( TOK_INTERSECTDISTINCT )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2572:36: ^( TOK_INTERSECTDISTINCT )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_INTERSECTDISTINCT, "TOK_INTERSECTDISTINCT"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 5 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2573:7: KW_EXCEPT KW_ALL
					{
					KW_EXCEPT993=(Token)match(input,KW_EXCEPT,FOLLOW_KW_EXCEPT_in_setOperator16638); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_EXCEPT.add(KW_EXCEPT993);

					KW_ALL994=(Token)match(input,KW_ALL,FOLLOW_KW_ALL_in_setOperator16640); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ALL.add(KW_ALL994);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2573:24: -> ^( TOK_EXCEPTALL )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2573:27: ^( TOK_EXCEPTALL )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_EXCEPTALL, "TOK_EXCEPTALL"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 6 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2574:7: KW_EXCEPT ( KW_DISTINCT )?
					{
					KW_EXCEPT995=(Token)match(input,KW_EXCEPT,FOLLOW_KW_EXCEPT_in_setOperator16654); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_EXCEPT.add(KW_EXCEPT995);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2574:17: ( KW_DISTINCT )?
					int alt296=2;
					int LA296_0 = input.LA(1);
					if ( (LA296_0==KW_DISTINCT) ) {
						alt296=1;
					}
					switch (alt296) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2574:17: KW_DISTINCT
							{
							KW_DISTINCT996=(Token)match(input,KW_DISTINCT,FOLLOW_KW_DISTINCT_in_setOperator16656); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_DISTINCT.add(KW_DISTINCT996);

							}
							break;

					}

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2574:30: -> ^( TOK_EXCEPTDISTINCT )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2574:33: ^( TOK_EXCEPTDISTINCT )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_EXCEPTDISTINCT, "TOK_EXCEPTDISTINCT"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 7 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2575:7: KW_MINUS KW_ALL
					{
					KW_MINUS997=(Token)match(input,KW_MINUS,FOLLOW_KW_MINUS_in_setOperator16671); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_MINUS.add(KW_MINUS997);

					KW_ALL998=(Token)match(input,KW_ALL,FOLLOW_KW_ALL_in_setOperator16673); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ALL.add(KW_ALL998);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2575:23: -> ^( TOK_EXCEPTALL )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2575:26: ^( TOK_EXCEPTALL )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_EXCEPTALL, "TOK_EXCEPTALL"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 8 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2576:7: KW_MINUS ( KW_DISTINCT )?
					{
					KW_MINUS999=(Token)match(input,KW_MINUS,FOLLOW_KW_MINUS_in_setOperator16687); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_MINUS.add(KW_MINUS999);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2576:16: ( KW_DISTINCT )?
					int alt297=2;
					int LA297_0 = input.LA(1);
					if ( (LA297_0==KW_DISTINCT) ) {
						alt297=1;
					}
					switch (alt297) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2576:16: KW_DISTINCT
							{
							KW_DISTINCT1000=(Token)match(input,KW_DISTINCT,FOLLOW_KW_DISTINCT_in_setOperator16689); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_DISTINCT.add(KW_DISTINCT1000);

							}
							break;

					}

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2576:29: -> ^( TOK_EXCEPTDISTINCT )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2576:32: ^( TOK_EXCEPTDISTINCT )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_EXCEPTDISTINCT, "TOK_EXCEPTDISTINCT"), root_1);
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "setOperator"


	public static class queryStatementExpression_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "queryStatementExpression"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2579:1: queryStatementExpression : (w= withClause )? queryStatementExpressionBody -> queryStatementExpressionBody ;
	public final HiveParser.queryStatementExpression_return queryStatementExpression() throws RecognitionException {
		HiveParser.queryStatementExpression_return retval = new HiveParser.queryStatementExpression_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope w =null;
		ParserRuleReturnScope queryStatementExpressionBody1001 =null;

		RewriteRuleSubtreeStream stream_withClause=new RewriteRuleSubtreeStream(adaptor,"rule withClause");
		RewriteRuleSubtreeStream stream_queryStatementExpressionBody=new RewriteRuleSubtreeStream(adaptor,"rule queryStatementExpressionBody");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2580:5: ( (w= withClause )? queryStatementExpressionBody -> queryStatementExpressionBody )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2585:5: (w= withClause )? queryStatementExpressionBody
			{
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2585:5: (w= withClause )?
			int alt299=2;
			int LA299_0 = input.LA(1);
			if ( (LA299_0==KW_WITH) ) {
				alt299=1;
			}
			switch (alt299) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2585:6: w= withClause
					{
					pushFollow(FOLLOW_withClause_in_queryStatementExpression16726);
					w=withClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_withClause.add(w.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_queryStatementExpressionBody_in_queryStatementExpression16734);
			queryStatementExpressionBody1001=queryStatementExpressionBody();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_queryStatementExpressionBody.add(queryStatementExpressionBody1001.getTree());
			if ( state.backtracking==0 ) {
			      if ((w!=null?((ASTNode)w.getTree()):null) != null) {
			      (queryStatementExpressionBody1001!=null?((ASTNode)queryStatementExpressionBody1001.getTree()):null).insertChild(0, (w!=null?((ASTNode)w.getTree()):null));
			      }
			    }
			// AST REWRITE
			// elements: queryStatementExpressionBody
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2591:5: -> queryStatementExpressionBody
			{
				adaptor.addChild(root_0, stream_queryStatementExpressionBody.nextTree());
			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "queryStatementExpression"


	public static class queryStatementExpressionBody_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "queryStatementExpressionBody"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2594:1: queryStatementExpressionBody : ( fromStatement | regularBody );
	public final HiveParser.queryStatementExpressionBody_return queryStatementExpressionBody() throws RecognitionException {
		HiveParser.queryStatementExpressionBody_return retval = new HiveParser.queryStatementExpressionBody_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope fromStatement1002 =null;
		ParserRuleReturnScope regularBody1003 =null;


		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2595:5: ( fromStatement | regularBody )
			int alt300=2;
			int LA300_0 = input.LA(1);
			if ( (LA300_0==KW_FROM) ) {
				alt300=1;
			}
			else if ( (LA300_0==KW_INSERT||LA300_0==KW_MAP||LA300_0==KW_REDUCE||LA300_0==KW_SELECT||LA300_0==LPAREN) ) {
				alt300=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 300, 0, input);
				throw nvae;
			}

			switch (alt300) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2596:5: fromStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_fromStatement_in_queryStatementExpressionBody16766);
					fromStatement1002=fromStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, fromStatement1002.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2597:7: regularBody
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_regularBody_in_queryStatementExpressionBody16774);
					regularBody1003=regularBody();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, regularBody1003.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "queryStatementExpressionBody"


	public static class withClause_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "withClause"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2600:1: withClause : KW_WITH cteStatement ( COMMA cteStatement )* -> ^( TOK_CTE ( cteStatement )+ ) ;
	public final HiveParser.withClause_return withClause() throws RecognitionException {
		HiveParser.withClause_return retval = new HiveParser.withClause_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_WITH1004=null;
		Token COMMA1006=null;
		ParserRuleReturnScope cteStatement1005 =null;
		ParserRuleReturnScope cteStatement1007 =null;

		ASTNode KW_WITH1004_tree=null;
		ASTNode COMMA1006_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
		RewriteRuleSubtreeStream stream_cteStatement=new RewriteRuleSubtreeStream(adaptor,"rule cteStatement");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2601:3: ( KW_WITH cteStatement ( COMMA cteStatement )* -> ^( TOK_CTE ( cteStatement )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2602:3: KW_WITH cteStatement ( COMMA cteStatement )*
			{
			KW_WITH1004=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_withClause16791); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_WITH.add(KW_WITH1004);

			pushFollow(FOLLOW_cteStatement_in_withClause16793);
			cteStatement1005=cteStatement();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_cteStatement.add(cteStatement1005.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2602:24: ( COMMA cteStatement )*
			loop301:
			while (true) {
				int alt301=2;
				int LA301_0 = input.LA(1);
				if ( (LA301_0==COMMA) ) {
					alt301=1;
				}

				switch (alt301) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2602:25: COMMA cteStatement
					{
					COMMA1006=(Token)match(input,COMMA,FOLLOW_COMMA_in_withClause16796); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA1006);

					pushFollow(FOLLOW_cteStatement_in_withClause16798);
					cteStatement1007=cteStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_cteStatement.add(cteStatement1007.getTree());
					}
					break;

				default :
					break loop301;
				}
			}

			// AST REWRITE
			// elements: cteStatement
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2602:46: -> ^( TOK_CTE ( cteStatement )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2602:49: ^( TOK_CTE ( cteStatement )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_CTE, "TOK_CTE"), root_1);
				if ( !(stream_cteStatement.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_cteStatement.hasNext() ) {
					adaptor.addChild(root_1, stream_cteStatement.nextTree());
				}
				stream_cteStatement.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "withClause"


	public static class cteStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "cteStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2605:1: cteStatement : identifier KW_AS LPAREN queryStatementExpression RPAREN -> ^( TOK_SUBQUERY queryStatementExpression identifier ) ;
	public final HiveParser.cteStatement_return cteStatement() throws RecognitionException {
		HiveParser.cteStatement_return retval = new HiveParser.cteStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_AS1009=null;
		Token LPAREN1010=null;
		Token RPAREN1012=null;
		ParserRuleReturnScope identifier1008 =null;
		ParserRuleReturnScope queryStatementExpression1011 =null;

		ASTNode KW_AS1009_tree=null;
		ASTNode LPAREN1010_tree=null;
		ASTNode RPAREN1012_tree=null;
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_queryStatementExpression=new RewriteRuleSubtreeStream(adaptor,"rule queryStatementExpression");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2606:4: ( identifier KW_AS LPAREN queryStatementExpression RPAREN -> ^( TOK_SUBQUERY queryStatementExpression identifier ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2607:4: identifier KW_AS LPAREN queryStatementExpression RPAREN
			{
			pushFollow(FOLLOW_identifier_in_cteStatement16824);
			identifier1008=identifier();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_identifier.add(identifier1008.getTree());
			KW_AS1009=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_cteStatement16826); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_AS.add(KW_AS1009);

			LPAREN1010=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_cteStatement16828); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN1010);

			pushFollow(FOLLOW_queryStatementExpression_in_cteStatement16830);
			queryStatementExpression1011=queryStatementExpression();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_queryStatementExpression.add(queryStatementExpression1011.getTree());
			RPAREN1012=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_cteStatement16832); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN1012);

			// AST REWRITE
			// elements: identifier, queryStatementExpression
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2608:4: -> ^( TOK_SUBQUERY queryStatementExpression identifier )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2608:7: ^( TOK_SUBQUERY queryStatementExpression identifier )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SUBQUERY, "TOK_SUBQUERY"), root_1);
				adaptor.addChild(root_1, stream_queryStatementExpression.nextTree());
				adaptor.addChild(root_1, stream_identifier.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "cteStatement"


	public static class fromStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "fromStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2611:1: fromStatement : ( singleFromStatement -> singleFromStatement ) (u= setOperator r= singleFromStatement -> ^( $u $r) )* -> {u != null}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) ) ->;
	public final HiveParser.fromStatement_return fromStatement() throws RecognitionException {
		HiveParser.fromStatement_return retval = new HiveParser.fromStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope u =null;
		ParserRuleReturnScope r =null;
		ParserRuleReturnScope singleFromStatement1013 =null;

		RewriteRuleSubtreeStream stream_setOperator=new RewriteRuleSubtreeStream(adaptor,"rule setOperator");
		RewriteRuleSubtreeStream stream_singleFromStatement=new RewriteRuleSubtreeStream(adaptor,"rule singleFromStatement");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2612:3: ( ( singleFromStatement -> singleFromStatement ) (u= setOperator r= singleFromStatement -> ^( $u $r) )* -> {u != null}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) ) ->)
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2612:3: ( singleFromStatement -> singleFromStatement ) (u= setOperator r= singleFromStatement -> ^( $u $r) )*
			{
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2612:3: ( singleFromStatement -> singleFromStatement )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2612:4: singleFromStatement
			{
			pushFollow(FOLLOW_singleFromStatement_in_fromStatement16855);
			singleFromStatement1013=singleFromStatement();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_singleFromStatement.add(singleFromStatement1013.getTree());
			// AST REWRITE
			// elements: singleFromStatement
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2612:25: -> singleFromStatement
			{
				adaptor.addChild(root_0, stream_singleFromStatement.nextTree());
			}


			retval.tree = root_0;
			}

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2613:2: (u= setOperator r= singleFromStatement -> ^( $u $r) )*
			loop302:
			while (true) {
				int alt302=2;
				int LA302_0 = input.LA(1);
				if ( (LA302_0==KW_EXCEPT||LA302_0==KW_INTERSECT||LA302_0==KW_MINUS||LA302_0==KW_UNION) ) {
					alt302=1;
				}

				switch (alt302) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2613:3: u= setOperator r= singleFromStatement
					{
					pushFollow(FOLLOW_setOperator_in_fromStatement16867);
					u=setOperator();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_setOperator.add(u.getTree());
					pushFollow(FOLLOW_singleFromStatement_in_fromStatement16871);
					r=singleFromStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_singleFromStatement.add(r.getTree());
					// AST REWRITE
					// elements: r, u
					// token labels: 
					// rule labels: r, u, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_r=new RewriteRuleSubtreeStream(adaptor,"rule r",r!=null?r.getTree():null);
					RewriteRuleSubtreeStream stream_u=new RewriteRuleSubtreeStream(adaptor,"rule u",u!=null?u.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2614:4: -> ^( $u $r)
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2614:7: ^( $u $r)
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot(stream_u.nextNode(), root_1);
						adaptor.addChild(root_1, retval.tree);
						adaptor.addChild(root_1, stream_r.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

				default :
					break loop302;
				}
			}

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2616:3: -> {u != null}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) )
			if (u != null) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2616:19: ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_QUERY, "TOK_QUERY"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2617:9: ^( TOK_FROM ^( TOK_SUBQUERY ) )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FROM, "TOK_FROM"), root_2);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2618:11: ^( TOK_SUBQUERY )
				{
				ASTNode root_3 = (ASTNode)adaptor.nil();
				root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SUBQUERY, "TOK_SUBQUERY"), root_3);
				adaptor.addChild(root_3, retval.tree);
				adaptor.addChild(root_3, adaptor.create(Identifier, generateUnionAlias()));
				adaptor.addChild(root_2, root_3);
				}

				adaptor.addChild(root_1, root_2);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2623:9: ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) ) )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_INSERT, "TOK_INSERT"), root_2);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2624:12: ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) )
				{
				ASTNode root_3 = (ASTNode)adaptor.nil();
				root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DESTINATION, "TOK_DESTINATION"), root_3);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2624:30: ^( TOK_DIR TOK_TMP_FILE )
				{
				ASTNode root_4 = (ASTNode)adaptor.nil();
				root_4 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DIR, "TOK_DIR"), root_4);
				adaptor.addChild(root_4, (ASTNode)adaptor.create(TOK_TMP_FILE, "TOK_TMP_FILE"));
				adaptor.addChild(root_3, root_4);
				}

				adaptor.addChild(root_2, root_3);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2625:12: ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) )
				{
				ASTNode root_3 = (ASTNode)adaptor.nil();
				root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SELECT, "TOK_SELECT"), root_3);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2625:25: ^( TOK_SELEXPR TOK_SETCOLREF )
				{
				ASTNode root_4 = (ASTNode)adaptor.nil();
				root_4 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SELEXPR, "TOK_SELEXPR"), root_4);
				adaptor.addChild(root_4, (ASTNode)adaptor.create(TOK_SETCOLREF, "TOK_SETCOLREF"));
				adaptor.addChild(root_3, root_4);
				}

				adaptor.addChild(root_2, root_3);
				}

				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2628:5: ->
			{
				adaptor.addChild(root_0, retval.tree);
			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "fromStatement"


	public static class singleFromStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "singleFromStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2632:1: singleFromStatement : fromClause (b+= body )+ -> ^( TOK_QUERY fromClause ( body )+ ) ;
	public final HiveParser.singleFromStatement_return singleFromStatement() throws RecognitionException {
		HiveParser.singleFromStatement_return retval = new HiveParser.singleFromStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		List<Object> list_b=null;
		ParserRuleReturnScope fromClause1014 =null;
		RuleReturnScope b = null;
		RewriteRuleSubtreeStream stream_fromClause=new RewriteRuleSubtreeStream(adaptor,"rule fromClause");
		RewriteRuleSubtreeStream stream_body=new RewriteRuleSubtreeStream(adaptor,"rule body");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2633:5: ( fromClause (b+= body )+ -> ^( TOK_QUERY fromClause ( body )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2634:5: fromClause (b+= body )+
			{
			pushFollow(FOLLOW_fromClause_in_singleFromStatement17078);
			fromClause1014=fromClause();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_fromClause.add(fromClause1014.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2635:5: (b+= body )+
			int cnt303=0;
			loop303:
			while (true) {
				int alt303=2;
				int LA303_0 = input.LA(1);
				if ( (LA303_0==KW_INSERT||LA303_0==KW_MAP||LA303_0==KW_REDUCE||LA303_0==KW_SELECT) ) {
					alt303=1;
				}

				switch (alt303) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2635:7: b+= body
					{
					pushFollow(FOLLOW_body_in_singleFromStatement17088);
					b=body();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_body.add(b.getTree());
					if (list_b==null) list_b=new ArrayList<Object>();
					list_b.add(b.getTree());
					}
					break;

				default :
					if ( cnt303 >= 1 ) break loop303;
					if (state.backtracking>0) {state.failed=true; return retval;}
					EarlyExitException eee = new EarlyExitException(303, input);
					throw eee;
				}
				cnt303++;
			}

			// AST REWRITE
			// elements: fromClause, body
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2635:18: -> ^( TOK_QUERY fromClause ( body )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2635:21: ^( TOK_QUERY fromClause ( body )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_QUERY, "TOK_QUERY"), root_1);
				adaptor.addChild(root_1, stream_fromClause.nextTree());
				if ( !(stream_body.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_body.hasNext() ) {
					adaptor.addChild(root_1, stream_body.nextTree());
				}
				stream_body.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "singleFromStatement"


	public static class regularBody_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "regularBody"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2644:1: regularBody : (i= insertClause (s= selectStatement ->| valuesClause -> ^( TOK_QUERY ^( TOK_INSERT ^( TOK_SELECT ^( TOK_SELEXPR ^( TOK_FUNCTION Identifier[\"inline\"] valuesClause ) ) ) ) ) ) | selectStatement );
	public final HiveParser.regularBody_return regularBody() throws RecognitionException {
		HiveParser.regularBody_return retval = new HiveParser.regularBody_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope i =null;
		ParserRuleReturnScope s =null;
		ParserRuleReturnScope valuesClause1015 =null;
		ParserRuleReturnScope selectStatement1016 =null;

		RewriteRuleSubtreeStream stream_insertClause=new RewriteRuleSubtreeStream(adaptor,"rule insertClause");
		RewriteRuleSubtreeStream stream_valuesClause=new RewriteRuleSubtreeStream(adaptor,"rule valuesClause");
		RewriteRuleSubtreeStream stream_selectStatement=new RewriteRuleSubtreeStream(adaptor,"rule selectStatement");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2645:4: (i= insertClause (s= selectStatement ->| valuesClause -> ^( TOK_QUERY ^( TOK_INSERT ^( TOK_SELECT ^( TOK_SELEXPR ^( TOK_FUNCTION Identifier[\"inline\"] valuesClause ) ) ) ) ) ) | selectStatement )
			int alt305=2;
			int LA305_0 = input.LA(1);
			if ( (LA305_0==KW_INSERT) ) {
				alt305=1;
			}
			else if ( (LA305_0==KW_MAP||LA305_0==KW_REDUCE||LA305_0==KW_SELECT||LA305_0==LPAREN) ) {
				alt305=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 305, 0, input);
				throw nvae;
			}

			switch (alt305) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2646:4: i= insertClause (s= selectStatement ->| valuesClause -> ^( TOK_QUERY ^( TOK_INSERT ^( TOK_SELECT ^( TOK_SELEXPR ^( TOK_FUNCTION Identifier[\"inline\"] valuesClause ) ) ) ) ) )
					{
					pushFollow(FOLLOW_insertClause_in_regularBody17125);
					i=insertClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_insertClause.add(i.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2647:4: (s= selectStatement ->| valuesClause -> ^( TOK_QUERY ^( TOK_INSERT ^( TOK_SELECT ^( TOK_SELEXPR ^( TOK_FUNCTION Identifier[\"inline\"] valuesClause ) ) ) ) ) )
					int alt304=2;
					int LA304_0 = input.LA(1);
					if ( (LA304_0==KW_MAP||LA304_0==KW_REDUCE||LA304_0==KW_SELECT||LA304_0==LPAREN) ) {
						alt304=1;
					}
					else if ( (LA304_0==KW_VALUES) ) {
						alt304=2;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						NoViableAltException nvae =
							new NoViableAltException("", 304, 0, input);
						throw nvae;
					}

					switch (alt304) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2648:4: s= selectStatement
							{
							pushFollow(FOLLOW_selectStatement_in_regularBody17137);
							s=selectStatement();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_selectStatement.add(s.getTree());
							if ( state.backtracking==0 ) {(s!=null?((ASTNode)s.getTree()):null).getFirstChildWithType(TOK_INSERT).replaceChildren(0, 0, (i!=null?((ASTNode)i.getTree()):null));}
							// AST REWRITE
							// elements: 
							// token labels: 
							// rule labels: retval
							// token list labels: 
							// rule list labels: 
							// wildcard labels: 
							if ( state.backtracking==0 ) {
							retval.tree = root_0;
							RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

							root_0 = (ASTNode)adaptor.nil();
							// 2649:82: ->
							{
								adaptor.addChild(root_0, (s!=null?((ASTNode)s.getTree()):null));
							}


							retval.tree = root_0;
							}

							}
							break;
						case 2 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2651:6: valuesClause
							{
							pushFollow(FOLLOW_valuesClause_in_regularBody17162);
							valuesClause1015=valuesClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_valuesClause.add(valuesClause1015.getTree());
							// AST REWRITE
							// elements: valuesClause
							// token labels: 
							// rule labels: retval
							// token list labels: 
							// rule list labels: 
							// wildcard labels: 
							if ( state.backtracking==0 ) {
							retval.tree = root_0;
							RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

							root_0 = (ASTNode)adaptor.nil();
							// 2652:7: -> ^( TOK_QUERY ^( TOK_INSERT ^( TOK_SELECT ^( TOK_SELEXPR ^( TOK_FUNCTION Identifier[\"inline\"] valuesClause ) ) ) ) )
							{
								// org/apache/hadoop/hive/ql/parse/HiveParser.g:2652:10: ^( TOK_QUERY ^( TOK_INSERT ^( TOK_SELECT ^( TOK_SELEXPR ^( TOK_FUNCTION Identifier[\"inline\"] valuesClause ) ) ) ) )
								{
								ASTNode root_1 = (ASTNode)adaptor.nil();
								root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_QUERY, "TOK_QUERY"), root_1);
								// org/apache/hadoop/hive/ql/parse/HiveParser.g:2653:13: ^( TOK_INSERT ^( TOK_SELECT ^( TOK_SELEXPR ^( TOK_FUNCTION Identifier[\"inline\"] valuesClause ) ) ) )
								{
								ASTNode root_2 = (ASTNode)adaptor.nil();
								root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_INSERT, "TOK_INSERT"), root_2);
								adaptor.addChild(root_2, (i!=null?((ASTNode)i.getTree()):null));
								// org/apache/hadoop/hive/ql/parse/HiveParser.g:2653:36: ^( TOK_SELECT ^( TOK_SELEXPR ^( TOK_FUNCTION Identifier[\"inline\"] valuesClause ) ) )
								{
								ASTNode root_3 = (ASTNode)adaptor.nil();
								root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SELECT, "TOK_SELECT"), root_3);
								// org/apache/hadoop/hive/ql/parse/HiveParser.g:2653:49: ^( TOK_SELEXPR ^( TOK_FUNCTION Identifier[\"inline\"] valuesClause ) )
								{
								ASTNode root_4 = (ASTNode)adaptor.nil();
								root_4 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SELEXPR, "TOK_SELEXPR"), root_4);
								// org/apache/hadoop/hive/ql/parse/HiveParser.g:2653:63: ^( TOK_FUNCTION Identifier[\"inline\"] valuesClause )
								{
								ASTNode root_5 = (ASTNode)adaptor.nil();
								root_5 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FUNCTION, "TOK_FUNCTION"), root_5);
								adaptor.addChild(root_5, (ASTNode)adaptor.create(Identifier, "inline"));
								adaptor.addChild(root_5, stream_valuesClause.nextTree());
								adaptor.addChild(root_4, root_5);
								}

								adaptor.addChild(root_3, root_4);
								}

								adaptor.addChild(root_2, root_3);
								}

								adaptor.addChild(root_1, root_2);
								}

								adaptor.addChild(root_0, root_1);
								}

							}


							retval.tree = root_0;
							}

							}
							break;

					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2657:4: selectStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_selectStatement_in_regularBody17235);
					selectStatement1016=selectStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, selectStatement1016.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "regularBody"


	public static class atomSelectStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "atomSelectStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2660:1: atomSelectStatement : (s= selectClause (f= fromClause )? (w= whereClause )? (g= groupByClause )? (h= havingClause )? (win= window_clause )? -> ^( TOK_QUERY ( $f)? ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) $s ( $w)? ( $g)? ( $h)? ( $win)? ) ) | LPAREN ! selectStatement RPAREN !);
	public final HiveParser.atomSelectStatement_return atomSelectStatement() throws RecognitionException {
		HiveParser.atomSelectStatement_return retval = new HiveParser.atomSelectStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token LPAREN1017=null;
		Token RPAREN1019=null;
		ParserRuleReturnScope s =null;
		ParserRuleReturnScope f =null;
		ParserRuleReturnScope w =null;
		ParserRuleReturnScope g =null;
		ParserRuleReturnScope h =null;
		ParserRuleReturnScope win =null;
		ParserRuleReturnScope selectStatement1018 =null;

		ASTNode LPAREN1017_tree=null;
		ASTNode RPAREN1019_tree=null;
		RewriteRuleSubtreeStream stream_whereClause=new RewriteRuleSubtreeStream(adaptor,"rule whereClause");
		RewriteRuleSubtreeStream stream_havingClause=new RewriteRuleSubtreeStream(adaptor,"rule havingClause");
		RewriteRuleSubtreeStream stream_fromClause=new RewriteRuleSubtreeStream(adaptor,"rule fromClause");
		RewriteRuleSubtreeStream stream_selectClause=new RewriteRuleSubtreeStream(adaptor,"rule selectClause");
		RewriteRuleSubtreeStream stream_groupByClause=new RewriteRuleSubtreeStream(adaptor,"rule groupByClause");
		RewriteRuleSubtreeStream stream_window_clause=new RewriteRuleSubtreeStream(adaptor,"rule window_clause");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2661:4: (s= selectClause (f= fromClause )? (w= whereClause )? (g= groupByClause )? (h= havingClause )? (win= window_clause )? -> ^( TOK_QUERY ( $f)? ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) $s ( $w)? ( $g)? ( $h)? ( $win)? ) ) | LPAREN ! selectStatement RPAREN !)
			int alt311=2;
			int LA311_0 = input.LA(1);
			if ( (LA311_0==KW_MAP||LA311_0==KW_REDUCE||LA311_0==KW_SELECT) ) {
				alt311=1;
			}
			else if ( (LA311_0==LPAREN) ) {
				alt311=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 311, 0, input);
				throw nvae;
			}

			switch (alt311) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2662:4: s= selectClause (f= fromClause )? (w= whereClause )? (g= groupByClause )? (h= havingClause )? (win= window_clause )?
					{
					pushFollow(FOLLOW_selectClause_in_atomSelectStatement17255);
					s=selectClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_selectClause.add(s.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2663:5: (f= fromClause )?
					int alt306=2;
					int LA306_0 = input.LA(1);
					if ( (LA306_0==KW_FROM) ) {
						alt306=1;
					}
					switch (alt306) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2663:5: f= fromClause
							{
							pushFollow(FOLLOW_fromClause_in_atomSelectStatement17262);
							f=fromClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_fromClause.add(f.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2664:5: (w= whereClause )?
					int alt307=2;
					int LA307_0 = input.LA(1);
					if ( (LA307_0==KW_WHERE) ) {
						alt307=1;
					}
					switch (alt307) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2664:5: w= whereClause
							{
							pushFollow(FOLLOW_whereClause_in_atomSelectStatement17270);
							w=whereClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_whereClause.add(w.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2665:5: (g= groupByClause )?
					int alt308=2;
					int LA308_0 = input.LA(1);
					if ( (LA308_0==KW_GROUP) ) {
						alt308=1;
					}
					switch (alt308) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2665:5: g= groupByClause
							{
							pushFollow(FOLLOW_groupByClause_in_atomSelectStatement17278);
							g=groupByClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_groupByClause.add(g.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2666:5: (h= havingClause )?
					int alt309=2;
					int LA309_0 = input.LA(1);
					if ( (LA309_0==KW_HAVING) ) {
						alt309=1;
					}
					switch (alt309) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2666:5: h= havingClause
							{
							pushFollow(FOLLOW_havingClause_in_atomSelectStatement17286);
							h=havingClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_havingClause.add(h.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2667:7: (win= window_clause )?
					int alt310=2;
					int LA310_0 = input.LA(1);
					if ( (LA310_0==KW_WINDOW) ) {
						alt310=1;
					}
					switch (alt310) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2667:7: win= window_clause
							{
							pushFollow(FOLLOW_window_clause_in_atomSelectStatement17294);
							win=window_clause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_window_clause.add(win.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: f, s, w, h, g, win
					// token labels: 
					// rule labels: s, f, w, g, h, win, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_s=new RewriteRuleSubtreeStream(adaptor,"rule s",s!=null?s.getTree():null);
					RewriteRuleSubtreeStream stream_f=new RewriteRuleSubtreeStream(adaptor,"rule f",f!=null?f.getTree():null);
					RewriteRuleSubtreeStream stream_w=new RewriteRuleSubtreeStream(adaptor,"rule w",w!=null?w.getTree():null);
					RewriteRuleSubtreeStream stream_g=new RewriteRuleSubtreeStream(adaptor,"rule g",g!=null?g.getTree():null);
					RewriteRuleSubtreeStream stream_h=new RewriteRuleSubtreeStream(adaptor,"rule h",h!=null?h.getTree():null);
					RewriteRuleSubtreeStream stream_win=new RewriteRuleSubtreeStream(adaptor,"rule win",win!=null?win.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2668:4: -> ^( TOK_QUERY ( $f)? ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) $s ( $w)? ( $g)? ( $h)? ( $win)? ) )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2668:7: ^( TOK_QUERY ( $f)? ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) $s ( $w)? ( $g)? ( $h)? ( $win)? ) )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_QUERY, "TOK_QUERY"), root_1);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2668:20: ( $f)?
						if ( stream_f.hasNext() ) {
							adaptor.addChild(root_1, stream_f.nextTree());
						}
						stream_f.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2668:23: ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) $s ( $w)? ( $g)? ( $h)? ( $win)? )
						{
						ASTNode root_2 = (ASTNode)adaptor.nil();
						root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_INSERT, "TOK_INSERT"), root_2);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2668:36: ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) )
						{
						ASTNode root_3 = (ASTNode)adaptor.nil();
						root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DESTINATION, "TOK_DESTINATION"), root_3);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2668:54: ^( TOK_DIR TOK_TMP_FILE )
						{
						ASTNode root_4 = (ASTNode)adaptor.nil();
						root_4 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DIR, "TOK_DIR"), root_4);
						adaptor.addChild(root_4, (ASTNode)adaptor.create(TOK_TMP_FILE, "TOK_TMP_FILE"));
						adaptor.addChild(root_3, root_4);
						}

						adaptor.addChild(root_2, root_3);
						}

						adaptor.addChild(root_2, stream_s.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2669:26: ( $w)?
						if ( stream_w.hasNext() ) {
							adaptor.addChild(root_2, stream_w.nextTree());
						}
						stream_w.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2669:30: ( $g)?
						if ( stream_g.hasNext() ) {
							adaptor.addChild(root_2, stream_g.nextTree());
						}
						stream_g.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2669:34: ( $h)?
						if ( stream_h.hasNext() ) {
							adaptor.addChild(root_2, stream_h.nextTree());
						}
						stream_h.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2669:38: ( $win)?
						if ( stream_win.hasNext() ) {
							adaptor.addChild(root_2, stream_win.nextTree());
						}
						stream_win.reset();

						adaptor.addChild(root_1, root_2);
						}

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2671:4: LPAREN ! selectStatement RPAREN !
					{
					root_0 = (ASTNode)adaptor.nil();


					LPAREN1017=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_atomSelectStatement17372); if (state.failed) return retval;
					pushFollow(FOLLOW_selectStatement_in_atomSelectStatement17375);
					selectStatement1018=selectStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, selectStatement1018.getTree());

					RPAREN1019=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_atomSelectStatement17377); if (state.failed) return retval;
					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "atomSelectStatement"


	public static class selectStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "selectStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2674:1: selectStatement : a= atomSelectStatement (set= setOpSelectStatement[$atomSelectStatement.tree] )? (o= orderByClause )? (c= clusterByClause )? (d= distributeByClause )? (sort= sortByClause )? (l= limitClause )? -> {set == null}? -> {o==null && c==null && d==null && sort==null && l==null}? -> ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) ) ( $o)? ( $c)? ( $d)? ( $sort)? ( $l)? ) ) ;
	public final HiveParser.selectStatement_return selectStatement() throws RecognitionException {
		HiveParser.selectStatement_return retval = new HiveParser.selectStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope a = null;
		ParserRuleReturnScope set = null;
		ParserRuleReturnScope o = null;
		ParserRuleReturnScope c = null;
		ParserRuleReturnScope d = null;
		ParserRuleReturnScope sort = null;
		ParserRuleReturnScope l = null;

		RewriteRuleSubtreeStream stream_clusterByClause=new RewriteRuleSubtreeStream(adaptor,"rule clusterByClause");
		RewriteRuleSubtreeStream stream_setOpSelectStatement=new RewriteRuleSubtreeStream(adaptor,"rule setOpSelectStatement");
		RewriteRuleSubtreeStream stream_sortByClause=new RewriteRuleSubtreeStream(adaptor,"rule sortByClause");
		RewriteRuleSubtreeStream stream_distributeByClause=new RewriteRuleSubtreeStream(adaptor,"rule distributeByClause");
		RewriteRuleSubtreeStream stream_limitClause=new RewriteRuleSubtreeStream(adaptor,"rule limitClause");
		RewriteRuleSubtreeStream stream_atomSelectStatement=new RewriteRuleSubtreeStream(adaptor,"rule atomSelectStatement");
		RewriteRuleSubtreeStream stream_orderByClause=new RewriteRuleSubtreeStream(adaptor,"rule orderByClause");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2675:4: (a= atomSelectStatement (set= setOpSelectStatement[$atomSelectStatement.tree] )? (o= orderByClause )? (c= clusterByClause )? (d= distributeByClause )? (sort= sortByClause )? (l= limitClause )? -> {set == null}? -> {o==null && c==null && d==null && sort==null && l==null}? -> ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) ) ( $o)? ( $c)? ( $d)? ( $sort)? ( $l)? ) ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2676:4: a= atomSelectStatement (set= setOpSelectStatement[$atomSelectStatement.tree] )? (o= orderByClause )? (c= clusterByClause )? (d= distributeByClause )? (sort= sortByClause )? (l= limitClause )?
			{
			pushFollow(FOLLOW_atomSelectStatement_in_selectStatement17398);
			a=atomSelectStatement();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_atomSelectStatement.add(a.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2677:7: (set= setOpSelectStatement[$atomSelectStatement.tree] )?
			int alt312=2;
			int LA312_0 = input.LA(1);
			if ( (LA312_0==KW_EXCEPT||LA312_0==KW_INTERSECT||LA312_0==KW_MINUS||LA312_0==KW_UNION) ) {
				alt312=1;
			}
			switch (alt312) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2677:7: set= setOpSelectStatement[$atomSelectStatement.tree]
					{
					pushFollow(FOLLOW_setOpSelectStatement_in_selectStatement17405);
					set=setOpSelectStatement((a!=null?((ASTNode)a.getTree()):null));
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_setOpSelectStatement.add(set.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2678:5: (o= orderByClause )?
			int alt313=2;
			int LA313_0 = input.LA(1);
			if ( (LA313_0==KW_ORDER) ) {
				alt313=1;
			}
			switch (alt313) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2678:5: o= orderByClause
					{
					pushFollow(FOLLOW_orderByClause_in_selectStatement17414);
					o=orderByClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_orderByClause.add(o.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2679:5: (c= clusterByClause )?
			int alt314=2;
			int LA314_0 = input.LA(1);
			if ( (LA314_0==KW_CLUSTER) ) {
				alt314=1;
			}
			switch (alt314) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2679:5: c= clusterByClause
					{
					pushFollow(FOLLOW_clusterByClause_in_selectStatement17422);
					c=clusterByClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_clusterByClause.add(c.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2680:5: (d= distributeByClause )?
			int alt315=2;
			int LA315_0 = input.LA(1);
			if ( (LA315_0==KW_DISTRIBUTE) ) {
				alt315=1;
			}
			switch (alt315) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2680:5: d= distributeByClause
					{
					pushFollow(FOLLOW_distributeByClause_in_selectStatement17430);
					d=distributeByClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_distributeByClause.add(d.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2681:8: (sort= sortByClause )?
			int alt316=2;
			int LA316_0 = input.LA(1);
			if ( (LA316_0==KW_SORT) ) {
				alt316=1;
			}
			switch (alt316) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2681:8: sort= sortByClause
					{
					pushFollow(FOLLOW_sortByClause_in_selectStatement17438);
					sort=sortByClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_sortByClause.add(sort.getTree());
					}
					break;

			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2682:5: (l= limitClause )?
			int alt317=2;
			int LA317_0 = input.LA(1);
			if ( (LA317_0==KW_LIMIT) ) {
				alt317=1;
			}
			switch (alt317) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2682:5: l= limitClause
					{
					pushFollow(FOLLOW_limitClause_in_selectStatement17446);
					l=limitClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_limitClause.add(l.getTree());
					}
					break;

			}

			if ( state.backtracking==0 ) {
			   if(set == null){
			   (a!=null?((ASTNode)a.getTree()):null).getFirstChildWithType(TOK_INSERT).addChild((o!=null?((ASTNode)o.getTree()):null));
			   (a!=null?((ASTNode)a.getTree()):null).getFirstChildWithType(TOK_INSERT).addChild((c!=null?((ASTNode)c.getTree()):null));
			   (a!=null?((ASTNode)a.getTree()):null).getFirstChildWithType(TOK_INSERT).addChild((d!=null?((ASTNode)d.getTree()):null));
			   (a!=null?((ASTNode)a.getTree()):null).getFirstChildWithType(TOK_INSERT).addChild((sort!=null?((ASTNode)sort.getTree()):null));
			   (a!=null?((ASTNode)a.getTree()):null).getFirstChildWithType(TOK_INSERT).addChild((l!=null?((ASTNode)l.getTree()):null));
			   }
			   }
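			// When no set operator followed the atom, the trailing ORDER BY / CLUSTER BY /
			// DISTRIBUTE BY / SORT BY / LIMIT subtrees (if any) have just been grafted onto
			// the atom's existing TOK_INSERT node, so the first rewrite branch below can
			// return the atom's tree unchanged.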
			// AST REWRITE
			// elements: o, d, l, c, sort
			// token labels: 
			// rule labels: c, d, sort, l, retval, o
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_c=new RewriteRuleSubtreeStream(adaptor,"rule c",c!=null?c.getTree():null);
			RewriteRuleSubtreeStream stream_d=new RewriteRuleSubtreeStream(adaptor,"rule d",d!=null?d.getTree():null);
			RewriteRuleSubtreeStream stream_sort=new RewriteRuleSubtreeStream(adaptor,"rule sort",sort!=null?sort.getTree():null);
			RewriteRuleSubtreeStream stream_l=new RewriteRuleSubtreeStream(adaptor,"rule l",l!=null?l.getTree():null);
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
			RewriteRuleSubtreeStream stream_o=new RewriteRuleSubtreeStream(adaptor,"rule o",o!=null?o.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2692:4: -> {set == null}?
			if (set == null) {
				adaptor.addChild(root_0, (a!=null?((ASTNode)a.getTree()):null));
			}

			else // 2694:4: -> {o==null && c==null && d==null && sort==null && l==null}?
			if (o==null && c==null && d==null && sort==null && l==null) {
				adaptor.addChild(root_0, (set!=null?((ASTNode)set.getTree()):null));
			}

			else // 2696:4: -> ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) ) ( $o)? ( $c)? ( $d)? ( $sort)? ( $l)? ) )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2696:7: ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) ) ( $o)? ( $c)? ( $d)? ( $sort)? ( $l)? ) )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_QUERY, "TOK_QUERY"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2697:11: ^( TOK_FROM ^( TOK_SUBQUERY ) )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FROM, "TOK_FROM"), root_2);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2698:13: ^( TOK_SUBQUERY )
				{
				ASTNode root_3 = (ASTNode)adaptor.nil();
				root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SUBQUERY, "TOK_SUBQUERY"), root_3);
				adaptor.addChild(root_3, (set!=null?((ASTNode)set.getTree()):null));
				adaptor.addChild(root_3, adaptor.create(Identifier, generateUnionAlias()));
				adaptor.addChild(root_2, root_3);
				}

				adaptor.addChild(root_1, root_2);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2703:11: ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) ) ( $o)? ( $c)? ( $d)? ( $sort)? ( $l)? )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_INSERT, "TOK_INSERT"), root_2);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2704:14: ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) )
				{
				ASTNode root_3 = (ASTNode)adaptor.nil();
				root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DESTINATION, "TOK_DESTINATION"), root_3);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2704:32: ^( TOK_DIR TOK_TMP_FILE )
				{
				ASTNode root_4 = (ASTNode)adaptor.nil();
				root_4 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DIR, "TOK_DIR"), root_4);
				adaptor.addChild(root_4, (ASTNode)adaptor.create(TOK_TMP_FILE, "TOK_TMP_FILE"));
				adaptor.addChild(root_3, root_4);
				}

				adaptor.addChild(root_2, root_3);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2705:14: ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) )
				{
				ASTNode root_3 = (ASTNode)adaptor.nil();
				root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SELECT, "TOK_SELECT"), root_3);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2705:27: ^( TOK_SELEXPR TOK_SETCOLREF )
				{
				ASTNode root_4 = (ASTNode)adaptor.nil();
				root_4 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SELEXPR, "TOK_SELEXPR"), root_4);
				adaptor.addChild(root_4, (ASTNode)adaptor.create(TOK_SETCOLREF, "TOK_SETCOLREF"));
				adaptor.addChild(root_3, root_4);
				}

				adaptor.addChild(root_2, root_3);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2706:15: ( $o)?
				if ( stream_o.hasNext() ) {
					adaptor.addChild(root_2, stream_o.nextTree());
				}
				stream_o.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2706:19: ( $c)?
				if ( stream_c.hasNext() ) {
					adaptor.addChild(root_2, stream_c.nextTree());
				}
				stream_c.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2706:23: ( $d)?
				if ( stream_d.hasNext() ) {
					adaptor.addChild(root_2, stream_d.nextTree());
				}
				stream_d.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2706:27: ( $sort)?
				if ( stream_sort.hasNext() ) {
					adaptor.addChild(root_2, stream_sort.nextTree());
				}
				stream_sort.reset();

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2706:34: ( $l)?
				if ( stream_l.hasNext() ) {
					adaptor.addChild(root_2, stream_l.nextTree());
				}
				stream_l.reset();

				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "selectStatement"


	public static class setOpSelectStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "setOpSelectStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2711:1: setOpSelectStatement[CommonTree t] : (u= setOperator b= atomSelectStatement -> {$setOpSelectStatement.tree != null && ((CommonTree)u.getTree()).getType()==HiveParser.TOK_UNIONDISTINCT}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ^( TOK_UNIONALL $b) ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECTDI ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) ) -> {$setOpSelectStatement.tree != null && ((CommonTree)u.getTree()).getType()!=HiveParser.TOK_UNIONDISTINCT}? ^( $u $b) -> {$setOpSelectStatement.tree == null && ((CommonTree)u.getTree()).getType()==HiveParser.TOK_UNIONDISTINCT}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ^( TOK_UNIONALL $b) ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECTDI ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) ) -> ^( $u $b) )+ -> {$setOpSelectStatement.tree.getChild(0).getType()==HiveParser.TOK_UNIONALL\n ||$setOpSelectStatement.tree.getChild(0).getType()==HiveParser.TOK_INTERSECTDISTINCT\n ||$setOpSelectStatement.tree.getChild(0).getType()==HiveParser.TOK_INTERSECTALL\n ||$setOpSelectStatement.tree.getChild(0).getType()==HiveParser.TOK_EXCEPTDISTINCT\n ||$setOpSelectStatement.tree.getChild(0).getType()==HiveParser.TOK_EXCEPTALL}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) ) ->;
	public final HiveParser.setOpSelectStatement_return setOpSelectStatement(CommonTree t) throws RecognitionException {
		HiveParser.setOpSelectStatement_return retval = new HiveParser.setOpSelectStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope u = null;
		ParserRuleReturnScope b = null;

		RewriteRuleSubtreeStream stream_setOperator=new RewriteRuleSubtreeStream(adaptor,"rule setOperator");
		RewriteRuleSubtreeStream stream_atomSelectStatement=new RewriteRuleSubtreeStream(adaptor,"rule atomSelectStatement");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2712:4: ( (u= setOperator b= atomSelectStatement -> {$setOpSelectStatement.tree != null && ((CommonTree)u.getTree()).getType()==HiveParser.TOK_UNIONDISTINCT}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ^( TOK_UNIONALL $b) ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECTDI ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) ) -> {$setOpSelectStatement.tree != null && ((CommonTree)u.getTree()).getType()!=HiveParser.TOK_UNIONDISTINCT}? ^( $u $b) -> {$setOpSelectStatement.tree == null && ((CommonTree)u.getTree()).getType()==HiveParser.TOK_UNIONDISTINCT}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ^( TOK_UNIONALL $b) ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECTDI ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) ) -> ^( $u $b) )+ -> {$setOpSelectStatement.tree.getChild(0).getType()==HiveParser.TOK_UNIONALL\n ||$setOpSelectStatement.tree.getChild(0).getType()==HiveParser.TOK_INTERSECTDISTINCT\n ||$setOpSelectStatement.tree.getChild(0).getType()==HiveParser.TOK_INTERSECTALL\n ||$setOpSelectStatement.tree.getChild(0).getType()==HiveParser.TOK_EXCEPTDISTINCT\n ||$setOpSelectStatement.tree.getChild(0).getType()==HiveParser.TOK_EXCEPTALL}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) ) ->)
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2713:4: (u= setOperator b= atomSelectStatement -> {$setOpSelectStatement.tree != null && ((CommonTree)u.getTree()).getType()==HiveParser.TOK_UNIONDISTINCT}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ^( TOK_UNIONALL $b) ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECTDI ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) ) -> {$setOpSelectStatement.tree != null && ((CommonTree)u.getTree()).getType()!=HiveParser.TOK_UNIONDISTINCT}? ^( $u $b) -> {$setOpSelectStatement.tree == null && ((CommonTree)u.getTree()).getType()==HiveParser.TOK_UNIONDISTINCT}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ^( TOK_UNIONALL $b) ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECTDI ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) ) -> ^( $u $b) )+
			{
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2713:4: (u= setOperator b= atomSelectStatement -> {$setOpSelectStatement.tree != null && ((CommonTree)u.getTree()).getType()==HiveParser.TOK_UNIONDISTINCT}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ^( TOK_UNIONALL $b) ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECTDI ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) ) -> {$setOpSelectStatement.tree != null && ((CommonTree)u.getTree()).getType()!=HiveParser.TOK_UNIONDISTINCT}? ^( $u $b) -> {$setOpSelectStatement.tree == null && ((CommonTree)u.getTree()).getType()==HiveParser.TOK_UNIONDISTINCT}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ^( TOK_UNIONALL $b) ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECTDI ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) ) -> ^( $u $b) )+
			int cnt318=0;
			loop318:
			while (true) {
				int alt318=2;
				int LA318_0 = input.LA(1);
				if ( (LA318_0==KW_EXCEPT||LA318_0==KW_INTERSECT||LA318_0==KW_MINUS||LA318_0==KW_UNION) ) {
					alt318=1;
				}

				switch (alt318) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2713:5: u= setOperator b= atomSelectStatement
					{
					pushFollow(FOLLOW_setOperator_in_setOpSelectStatement17711);
					u=setOperator();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_setOperator.add(u.getTree());
					pushFollow(FOLLOW_atomSelectStatement_in_setOpSelectStatement17715);
					b=atomSelectStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_atomSelectStatement.add(b.getTree());
					// AST REWRITE
					// elements: b, b, u, u, b, b
					// token labels: 
					// rule labels: b, u, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_b=new RewriteRuleSubtreeStream(adaptor,"rule b",b!=null?b.getTree():null);
					RewriteRuleSubtreeStream stream_u=new RewriteRuleSubtreeStream(adaptor,"rule u",u!=null?u.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2714:4: -> {$setOpSelectStatement.tree != null && ((CommonTree)u.getTree()).getType()==HiveParser.TOK_UNIONDISTINCT}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ^( TOK_UNIONALL $b) ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECTDI ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) )
					if (retval.tree != null && ((CommonTree)u.getTree()).getType()==HiveParser.TOK_UNIONDISTINCT) {
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2715:7: ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ^( TOK_UNIONALL $b) ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECTDI ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_QUERY, "TOK_QUERY"), root_1);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2716:11: ^( TOK_FROM ^( TOK_SUBQUERY ^( TOK_UNIONALL $b) ) )
						{
						ASTNode root_2 = (ASTNode)adaptor.nil();
						root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FROM, "TOK_FROM"), root_2);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2717:13: ^( TOK_SUBQUERY ^( TOK_UNIONALL $b) )
						{
						ASTNode root_3 = (ASTNode)adaptor.nil();
						root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SUBQUERY, "TOK_SUBQUERY"), root_3);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2718:15: ^( TOK_UNIONALL $b)
						{
						ASTNode root_4 = (ASTNode)adaptor.nil();
						root_4 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_UNIONALL, "TOK_UNIONALL"), root_4);
						adaptor.addChild(root_4, retval.tree);
						adaptor.addChild(root_4, stream_b.nextTree());
						adaptor.addChild(root_3, root_4);
						}

						adaptor.addChild(root_3, adaptor.create(Identifier, generateUnionAlias()));
						adaptor.addChild(root_2, root_3);
						}

						adaptor.addChild(root_1, root_2);
						}

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2722:11: ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECTDI ^( TOK_SELEXPR TOK_SETCOLREF ) ) )
						{
						ASTNode root_2 = (ASTNode)adaptor.nil();
						root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_INSERT, "TOK_INSERT"), root_2);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2723:14: ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) )
						{
						ASTNode root_3 = (ASTNode)adaptor.nil();
						root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DESTINATION, "TOK_DESTINATION"), root_3);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2723:32: ^( TOK_DIR TOK_TMP_FILE )
						{
						ASTNode root_4 = (ASTNode)adaptor.nil();
						root_4 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DIR, "TOK_DIR"), root_4);
						adaptor.addChild(root_4, (ASTNode)adaptor.create(TOK_TMP_FILE, "TOK_TMP_FILE"));
						adaptor.addChild(root_3, root_4);
						}

						adaptor.addChild(root_2, root_3);
						}

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2724:14: ^( TOK_SELECTDI ^( TOK_SELEXPR TOK_SETCOLREF ) )
						{
						ASTNode root_3 = (ASTNode)adaptor.nil();
						root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SELECTDI, "TOK_SELECTDI"), root_3);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2724:29: ^( TOK_SELEXPR TOK_SETCOLREF )
						{
						ASTNode root_4 = (ASTNode)adaptor.nil();
						root_4 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SELEXPR, "TOK_SELEXPR"), root_4);
						adaptor.addChild(root_4, (ASTNode)adaptor.create(TOK_SETCOLREF, "TOK_SETCOLREF"));
						adaptor.addChild(root_3, root_4);
						}

						adaptor.addChild(root_2, root_3);
						}

						adaptor.addChild(root_1, root_2);
						}

						adaptor.addChild(root_0, root_1);
						}

					}

					else // 2727:4: -> {$setOpSelectStatement.tree != null && ((CommonTree)u.getTree()).getType()!=HiveParser.TOK_UNIONDISTINCT}? ^( $u $b)
					if (retval.tree != null && ((CommonTree)u.getTree()).getType()!=HiveParser.TOK_UNIONDISTINCT) {
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2728:7: ^( $u $b)
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot(stream_u.nextNode(), root_1);
						adaptor.addChild(root_1, retval.tree);
						adaptor.addChild(root_1, stream_b.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}

					else // 2729:4: -> {$setOpSelectStatement.tree == null && ((CommonTree)u.getTree()).getType()==HiveParser.TOK_UNIONDISTINCT}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ^( TOK_UNIONALL $b) ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECTDI ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) )
					if (retval.tree == null && ((CommonTree)u.getTree()).getType()==HiveParser.TOK_UNIONDISTINCT) {
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2730:7: ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ^( TOK_UNIONALL $b) ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECTDI ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_QUERY, "TOK_QUERY"), root_1);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2731:11: ^( TOK_FROM ^( TOK_SUBQUERY ^( TOK_UNIONALL $b) ) )
						{
						ASTNode root_2 = (ASTNode)adaptor.nil();
						root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FROM, "TOK_FROM"), root_2);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2732:13: ^( TOK_SUBQUERY ^( TOK_UNIONALL $b) )
						{
						ASTNode root_3 = (ASTNode)adaptor.nil();
						root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SUBQUERY, "TOK_SUBQUERY"), root_3);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2733:15: ^( TOK_UNIONALL $b)
						{
						ASTNode root_4 = (ASTNode)adaptor.nil();
						root_4 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_UNIONALL, "TOK_UNIONALL"), root_4);
						adaptor.addChild(root_4, t);
						adaptor.addChild(root_4, stream_b.nextTree());
						adaptor.addChild(root_3, root_4);
						}

						adaptor.addChild(root_3, adaptor.create(Identifier, generateUnionAlias()));
						adaptor.addChild(root_2, root_3);
						}

						adaptor.addChild(root_1, root_2);
						}

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2737:11: ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECTDI ^( TOK_SELEXPR TOK_SETCOLREF ) ) )
						{
						ASTNode root_2 = (ASTNode)adaptor.nil();
						root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_INSERT, "TOK_INSERT"), root_2);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2738:13: ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) )
						{
						ASTNode root_3 = (ASTNode)adaptor.nil();
						root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DESTINATION, "TOK_DESTINATION"), root_3);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2738:31: ^( TOK_DIR TOK_TMP_FILE )
						{
						ASTNode root_4 = (ASTNode)adaptor.nil();
						root_4 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DIR, "TOK_DIR"), root_4);
						adaptor.addChild(root_4, (ASTNode)adaptor.create(TOK_TMP_FILE, "TOK_TMP_FILE"));
						adaptor.addChild(root_3, root_4);
						}

						adaptor.addChild(root_2, root_3);
						}

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2739:13: ^( TOK_SELECTDI ^( TOK_SELEXPR TOK_SETCOLREF ) )
						{
						ASTNode root_3 = (ASTNode)adaptor.nil();
						root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SELECTDI, "TOK_SELECTDI"), root_3);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2739:28: ^( TOK_SELEXPR TOK_SETCOLREF )
						{
						ASTNode root_4 = (ASTNode)adaptor.nil();
						root_4 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SELEXPR, "TOK_SELEXPR"), root_4);
						adaptor.addChild(root_4, (ASTNode)adaptor.create(TOK_SETCOLREF, "TOK_SETCOLREF"));
						adaptor.addChild(root_3, root_4);
						}

						adaptor.addChild(root_2, root_3);
						}

						adaptor.addChild(root_1, root_2);
						}

						adaptor.addChild(root_0, root_1);
						}

					}

					else // 2742:4: -> ^( $u $b)
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2742:7: ^( $u $b)
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot(stream_u.nextNode(), root_1);
						adaptor.addChild(root_1, t);
						adaptor.addChild(root_1, stream_b.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

				default :
					if ( cnt318 >= 1 ) break loop318;
					if (state.backtracking>0) {state.failed=true; return retval;}
					EarlyExitException eee = new EarlyExitException(318, input);
					throw eee;
				}
				cnt318++;
			}
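			// Each pass of the loop above folds one "<setOperator> <atomSelectStatement>"
			// pair onto the tree built so far (retval.tree, or the caller-supplied left
			// operand t on the first pass), so a chain of set operations parses
			// left-associatively: A UNION B UNION C groups as (A UNION B) UNION C.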

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2744:4: -> {$setOpSelectStatement.tree.getChild(0).getType()==HiveParser.TOK_UNIONALL\n ||$setOpSelectStatement.tree.getChild(0).getType()==HiveParser.TOK_INTERSECTDISTINCT\n ||$setOpSelectStatement.tree.getChild(0).getType()==HiveParser.TOK_INTERSECTALL\n ||$setOpSelectStatement.tree.getChild(0).getType()==HiveParser.TOK_EXCEPTDISTINCT\n ||$setOpSelectStatement.tree.getChild(0).getType()==HiveParser.TOK_EXCEPTALL}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) )
			if (retval.tree.getChild(0).getType()==HiveParser.TOK_UNIONALL
			   ||retval.tree.getChild(0).getType()==HiveParser.TOK_INTERSECTDISTINCT
			   ||retval.tree.getChild(0).getType()==HiveParser.TOK_INTERSECTALL
			   ||retval.tree.getChild(0).getType()==HiveParser.TOK_EXCEPTDISTINCT
			   ||retval.tree.getChild(0).getType()==HiveParser.TOK_EXCEPTALL) {
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2749:7: ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) ) ) )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_QUERY, "TOK_QUERY"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2750:11: ^( TOK_FROM ^( TOK_SUBQUERY ) )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_FROM, "TOK_FROM"), root_2);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2751:13: ^( TOK_SUBQUERY )
				{
				ASTNode root_3 = (ASTNode)adaptor.nil();
				root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SUBQUERY, "TOK_SUBQUERY"), root_3);
				adaptor.addChild(root_3, retval.tree);
				adaptor.addChild(root_3, adaptor.create(Identifier, generateUnionAlias()));
				adaptor.addChild(root_2, root_3);
				}

				adaptor.addChild(root_1, root_2);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2756:11: ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) ) )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_INSERT, "TOK_INSERT"), root_2);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2757:14: ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) )
				{
				ASTNode root_3 = (ASTNode)adaptor.nil();
				root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DESTINATION, "TOK_DESTINATION"), root_3);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2757:32: ^( TOK_DIR TOK_TMP_FILE )
				{
				ASTNode root_4 = (ASTNode)adaptor.nil();
				root_4 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DIR, "TOK_DIR"), root_4);
				adaptor.addChild(root_4, (ASTNode)adaptor.create(TOK_TMP_FILE, "TOK_TMP_FILE"));
				adaptor.addChild(root_3, root_4);
				}

				adaptor.addChild(root_2, root_3);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2758:14: ^( TOK_SELECT ^( TOK_SELEXPR TOK_SETCOLREF ) )
				{
				ASTNode root_3 = (ASTNode)adaptor.nil();
				root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SELECT, "TOK_SELECT"), root_3);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2758:27: ^( TOK_SELEXPR TOK_SETCOLREF )
				{
				ASTNode root_4 = (ASTNode)adaptor.nil();
				root_4 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SELEXPR, "TOK_SELEXPR"), root_4);
				adaptor.addChild(root_4, (ASTNode)adaptor.create(TOK_SETCOLREF, "TOK_SETCOLREF"));
				adaptor.addChild(root_3, root_4);
				}

				adaptor.addChild(root_2, root_3);
				}

				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_0, root_1);
				}

			}

			else // 2761:4: ->
			{
				adaptor.addChild(root_0, retval.tree);
			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "setOpSelectStatement"


	public static class selectStatementWithCTE_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "selectStatementWithCTE"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2764:1: selectStatementWithCTE : (w= withClause )? selectStatement -> selectStatement ;
	public final HiveParser.selectStatementWithCTE_return selectStatementWithCTE() throws RecognitionException {
		HiveParser.selectStatementWithCTE_return retval = new HiveParser.selectStatementWithCTE_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope w = null;
		ParserRuleReturnScope selectStatement1020 = null;

		RewriteRuleSubtreeStream stream_withClause=new RewriteRuleSubtreeStream(adaptor,"rule withClause");
		RewriteRuleSubtreeStream stream_selectStatement=new RewriteRuleSubtreeStream(adaptor,"rule selectStatement");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2765:5: ( (w= withClause )? selectStatement -> selectStatement )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2766:5: (w= withClause )? selectStatement
			{
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2766:5: (w= withClause )?
			int alt319=2;
			int LA319_0 = input.LA(1);
			if ( (LA319_0==KW_WITH) ) {
				alt319=1;
			}
			switch (alt319) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2766:6: w= withClause
					{
					pushFollow(FOLLOW_withClause_in_selectStatementWithCTE18350);
					w=withClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_withClause.add(w.getTree());
					}
					break;

			}

			pushFollow(FOLLOW_selectStatement_in_selectStatementWithCTE18358);
			selectStatement1020=selectStatement();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_selectStatement.add(selectStatement1020.getTree());
			if ( state.backtracking==0 ) {
			      if ((w!=null?((ASTNode)w.getTree()):null) != null) {
			      (selectStatement1020!=null?((ASTNode)selectStatement1020.getTree()):null).insertChild(0, (w!=null?((ASTNode)w.getTree()):null));
			      }
			    }
			// AST REWRITE
			// elements: selectStatement
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2772:5: -> selectStatement
			{
				adaptor.addChild(root_0, stream_selectStatement.nextTree());
			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// cleanup that must always run before leaving the rule
		}
		return retval;
	}
	// $ANTLR end "selectStatementWithCTE"


	public static class body_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "body"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2775:1: body : ( insertClause selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( window_clause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( limitClause )? -> ^( TOK_INSERT insertClause selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? ) | selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( window_clause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( limitClause )? -> ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? ) );
	public final HiveParser.body_return body() throws RecognitionException {
		HiveParser.body_return retval = new HiveParser.body_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope insertClause1021 = null;
		ParserRuleReturnScope selectClause1022 = null;
		ParserRuleReturnScope lateralView1023 = null;
		ParserRuleReturnScope whereClause1024 = null;
		ParserRuleReturnScope groupByClause1025 = null;
		ParserRuleReturnScope havingClause1026 = null;
		ParserRuleReturnScope window_clause1027 = null;
		ParserRuleReturnScope orderByClause1028 = null;
		ParserRuleReturnScope clusterByClause1029 = null;
		ParserRuleReturnScope distributeByClause1030 = null;
		ParserRuleReturnScope sortByClause1031 = null;
		ParserRuleReturnScope limitClause1032 = null;
		ParserRuleReturnScope selectClause1033 = null;
		ParserRuleReturnScope lateralView1034 = null;
		ParserRuleReturnScope whereClause1035 = null;
		ParserRuleReturnScope groupByClause1036 = null;
		ParserRuleReturnScope havingClause1037 = null;
		ParserRuleReturnScope window_clause1038 = null;
		ParserRuleReturnScope orderByClause1039 = null;
		ParserRuleReturnScope clusterByClause1040 = null;
		ParserRuleReturnScope distributeByClause1041 = null;
		ParserRuleReturnScope sortByClause1042 = null;
		ParserRuleReturnScope limitClause1043 = null;

		RewriteRuleSubtreeStream stream_whereClause=new RewriteRuleSubtreeStream(adaptor,"rule whereClause");
		RewriteRuleSubtreeStream stream_havingClause=new RewriteRuleSubtreeStream(adaptor,"rule havingClause");
		RewriteRuleSubtreeStream stream_clusterByClause=new RewriteRuleSubtreeStream(adaptor,"rule clusterByClause");
		RewriteRuleSubtreeStream stream_lateralView=new RewriteRuleSubtreeStream(adaptor,"rule lateralView");
		RewriteRuleSubtreeStream stream_insertClause=new RewriteRuleSubtreeStream(adaptor,"rule insertClause");
		RewriteRuleSubtreeStream stream_selectClause=new RewriteRuleSubtreeStream(adaptor,"rule selectClause");
		RewriteRuleSubtreeStream stream_sortByClause=new RewriteRuleSubtreeStream(adaptor,"rule sortByClause");
		RewriteRuleSubtreeStream stream_groupByClause=new RewriteRuleSubtreeStream(adaptor,"rule groupByClause");
		RewriteRuleSubtreeStream stream_distributeByClause=new RewriteRuleSubtreeStream(adaptor,"rule distributeByClause");
		RewriteRuleSubtreeStream stream_limitClause=new RewriteRuleSubtreeStream(adaptor,"rule limitClause");
		RewriteRuleSubtreeStream stream_orderByClause=new RewriteRuleSubtreeStream(adaptor,"rule orderByClause");
		RewriteRuleSubtreeStream stream_window_clause=new RewriteRuleSubtreeStream(adaptor,"rule window_clause");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2776:4: ( insertClause selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( window_clause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( limitClause )? -> ^( TOK_INSERT insertClause selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? ) | selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( window_clause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( limitClause )? -> ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? ) )
			int alt340=2;
			int LA340_0 = input.LA(1);
			if ( (LA340_0==KW_INSERT) ) {
				alt340=1;
			}
			else if ( (LA340_0==KW_MAP||LA340_0==KW_REDUCE||LA340_0==KW_SELECT) ) {
				alt340=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 340, 0, input);
				throw nvae;
			}
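			// Alternative prediction for body: a leading INSERT selects the explicit-
			// destination form, while SELECT, MAP, or REDUCE selects the implicit form
			// that writes to a temporary directory (see the second rewrite below).
			// Any other leading token is a syntax error at this point.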

			switch (alt340) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2777:4: insertClause selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( window_clause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( limitClause )?
					{
					pushFollow(FOLLOW_insertClause_in_body18388);
					insertClause1021=insertClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_insertClause.add(insertClause1021.getTree());
					pushFollow(FOLLOW_selectClause_in_body18393);
					selectClause1022=selectClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_selectClause.add(selectClause1022.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2779:4: ( lateralView )?
					int alt320=2;
					int LA320_0 = input.LA(1);
					if ( (LA320_0==COMMA||LA320_0==KW_LATERAL) ) {
						alt320=1;
					}
					switch (alt320) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2779:4: lateralView
							{
							pushFollow(FOLLOW_lateralView_in_body18398);
							lateralView1023=lateralView();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_lateralView.add(lateralView1023.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2780:4: ( whereClause )?
					int alt321=2;
					int LA321_0 = input.LA(1);
					if ( (LA321_0==KW_WHERE) ) {
						alt321=1;
					}
					switch (alt321) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2780:4: whereClause
							{
							pushFollow(FOLLOW_whereClause_in_body18404);
							whereClause1024=whereClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_whereClause.add(whereClause1024.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2781:4: ( groupByClause )?
					int alt322=2;
					int LA322_0 = input.LA(1);
					if ( (LA322_0==KW_GROUP) ) {
						alt322=1;
					}
					switch (alt322) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2781:4: groupByClause
							{
							pushFollow(FOLLOW_groupByClause_in_body18410);
							groupByClause1025=groupByClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_groupByClause.add(groupByClause1025.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2782:4: ( havingClause )?
					int alt323=2;
					int LA323_0 = input.LA(1);
					if ( (LA323_0==KW_HAVING) ) {
						alt323=1;
					}
					switch (alt323) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2782:4: havingClause
							{
							pushFollow(FOLLOW_havingClause_in_body18416);
							havingClause1026=havingClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_havingClause.add(havingClause1026.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2783:4: ( window_clause )?
					int alt324=2;
					int LA324_0 = input.LA(1);
					if ( (LA324_0==KW_WINDOW) ) {
						alt324=1;
					}
					switch (alt324) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2783:4: window_clause
							{
							pushFollow(FOLLOW_window_clause_in_body18422);
							window_clause1027=window_clause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_window_clause.add(window_clause1027.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2784:4: ( orderByClause )?
					int alt325=2;
					int LA325_0 = input.LA(1);
					if ( (LA325_0==KW_ORDER) ) {
						alt325=1;
					}
					switch (alt325) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2784:4: orderByClause
							{
							pushFollow(FOLLOW_orderByClause_in_body18428);
							orderByClause1028=orderByClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_orderByClause.add(orderByClause1028.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2785:4: ( clusterByClause )?
					int alt326=2;
					int LA326_0 = input.LA(1);
					if ( (LA326_0==KW_CLUSTER) ) {
						alt326=1;
					}
					switch (alt326) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2785:4: clusterByClause
							{
							pushFollow(FOLLOW_clusterByClause_in_body18434);
							clusterByClause1029=clusterByClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_clusterByClause.add(clusterByClause1029.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2786:4: ( distributeByClause )?
					int alt327=2;
					int LA327_0 = input.LA(1);
					if ( (LA327_0==KW_DISTRIBUTE) ) {
						alt327=1;
					}
					switch (alt327) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2786:4: distributeByClause
							{
							pushFollow(FOLLOW_distributeByClause_in_body18440);
							distributeByClause1030=distributeByClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_distributeByClause.add(distributeByClause1030.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2787:4: ( sortByClause )?
					int alt328=2;
					int LA328_0 = input.LA(1);
					if ( (LA328_0==KW_SORT) ) {
						alt328=1;
					}
					switch (alt328) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2787:4: sortByClause
							{
							pushFollow(FOLLOW_sortByClause_in_body18446);
							sortByClause1031=sortByClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_sortByClause.add(sortByClause1031.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2788:4: ( limitClause )?
					int alt329=2;
					int LA329_0 = input.LA(1);
					if ( (LA329_0==KW_LIMIT) ) {
						alt329=1;
					}
					switch (alt329) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2788:4: limitClause
							{
							pushFollow(FOLLOW_limitClause_in_body18452);
							limitClause1032=limitClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_limitClause.add(limitClause1032.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: havingClause, limitClause, selectClause, groupByClause, whereClause, sortByClause, window_clause, orderByClause, clusterByClause, lateralView, distributeByClause, insertClause
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2788:17: -> ^( TOK_INSERT insertClause selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2788:20: ^( TOK_INSERT insertClause selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_INSERT, "TOK_INSERT"), root_1);
						adaptor.addChild(root_1, stream_insertClause.nextTree());
						adaptor.addChild(root_1, stream_selectClause.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2789:35: ( lateralView )?
						if ( stream_lateralView.hasNext() ) {
							adaptor.addChild(root_1, stream_lateralView.nextTree());
						}
						stream_lateralView.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2789:48: ( whereClause )?
						if ( stream_whereClause.hasNext() ) {
							adaptor.addChild(root_1, stream_whereClause.nextTree());
						}
						stream_whereClause.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2789:61: ( groupByClause )?
						if ( stream_groupByClause.hasNext() ) {
							adaptor.addChild(root_1, stream_groupByClause.nextTree());
						}
						stream_groupByClause.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2789:76: ( havingClause )?
						if ( stream_havingClause.hasNext() ) {
							adaptor.addChild(root_1, stream_havingClause.nextTree());
						}
						stream_havingClause.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2789:90: ( orderByClause )?
						if ( stream_orderByClause.hasNext() ) {
							adaptor.addChild(root_1, stream_orderByClause.nextTree());
						}
						stream_orderByClause.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2789:105: ( clusterByClause )?
						if ( stream_clusterByClause.hasNext() ) {
							adaptor.addChild(root_1, stream_clusterByClause.nextTree());
						}
						stream_clusterByClause.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2790:22: ( distributeByClause )?
						if ( stream_distributeByClause.hasNext() ) {
							adaptor.addChild(root_1, stream_distributeByClause.nextTree());
						}
						stream_distributeByClause.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2790:42: ( sortByClause )?
						if ( stream_sortByClause.hasNext() ) {
							adaptor.addChild(root_1, stream_sortByClause.nextTree());
						}
						stream_sortByClause.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2790:56: ( window_clause )?
						if ( stream_window_clause.hasNext() ) {
							adaptor.addChild(root_1, stream_window_clause.nextTree());
						}
						stream_window_clause.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2790:71: ( limitClause )?
						if ( stream_limitClause.hasNext() ) {
							adaptor.addChild(root_1, stream_limitClause.nextTree());
						}
						stream_limitClause.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2792:4: selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( window_clause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( limitClause )?
					{
					pushFollow(FOLLOW_selectClause_in_body18545);
					selectClause1033=selectClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_selectClause.add(selectClause1033.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2793:4: ( lateralView )?
					int alt330=2;
					int LA330_0 = input.LA(1);
					if ( (LA330_0==COMMA||LA330_0==KW_LATERAL) ) {
						alt330=1;
					}
					switch (alt330) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2793:4: lateralView
							{
							pushFollow(FOLLOW_lateralView_in_body18550);
							lateralView1034=lateralView();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_lateralView.add(lateralView1034.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2794:4: ( whereClause )?
					int alt331=2;
					int LA331_0 = input.LA(1);
					if ( (LA331_0==KW_WHERE) ) {
						alt331=1;
					}
					switch (alt331) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2794:4: whereClause
							{
							pushFollow(FOLLOW_whereClause_in_body18556);
							whereClause1035=whereClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_whereClause.add(whereClause1035.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2795:4: ( groupByClause )?
					int alt332=2;
					int LA332_0 = input.LA(1);
					if ( (LA332_0==KW_GROUP) ) {
						alt332=1;
					}
					switch (alt332) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2795:4: groupByClause
							{
							pushFollow(FOLLOW_groupByClause_in_body18562);
							groupByClause1036=groupByClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_groupByClause.add(groupByClause1036.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2796:4: ( havingClause )?
					int alt333=2;
					int LA333_0 = input.LA(1);
					if ( (LA333_0==KW_HAVING) ) {
						alt333=1;
					}
					switch (alt333) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2796:4: havingClause
							{
							pushFollow(FOLLOW_havingClause_in_body18568);
							havingClause1037=havingClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_havingClause.add(havingClause1037.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2797:4: ( window_clause )?
					int alt334=2;
					int LA334_0 = input.LA(1);
					if ( (LA334_0==KW_WINDOW) ) {
						alt334=1;
					}
					switch (alt334) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2797:4: window_clause
							{
							pushFollow(FOLLOW_window_clause_in_body18574);
							window_clause1038=window_clause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_window_clause.add(window_clause1038.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2798:4: ( orderByClause )?
					int alt335=2;
					int LA335_0 = input.LA(1);
					if ( (LA335_0==KW_ORDER) ) {
						alt335=1;
					}
					switch (alt335) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2798:4: orderByClause
							{
							pushFollow(FOLLOW_orderByClause_in_body18580);
							orderByClause1039=orderByClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_orderByClause.add(orderByClause1039.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2799:4: ( clusterByClause )?
					int alt336=2;
					int LA336_0 = input.LA(1);
					if ( (LA336_0==KW_CLUSTER) ) {
						alt336=1;
					}
					switch (alt336) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2799:4: clusterByClause
							{
							pushFollow(FOLLOW_clusterByClause_in_body18586);
							clusterByClause1040=clusterByClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_clusterByClause.add(clusterByClause1040.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2800:4: ( distributeByClause )?
					int alt337=2;
					int LA337_0 = input.LA(1);
					if ( (LA337_0==KW_DISTRIBUTE) ) {
						alt337=1;
					}
					switch (alt337) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2800:4: distributeByClause
							{
							pushFollow(FOLLOW_distributeByClause_in_body18592);
							distributeByClause1041=distributeByClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_distributeByClause.add(distributeByClause1041.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2801:4: ( sortByClause )?
					int alt338=2;
					int LA338_0 = input.LA(1);
					if ( (LA338_0==KW_SORT) ) {
						alt338=1;
					}
					switch (alt338) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2801:4: sortByClause
							{
							pushFollow(FOLLOW_sortByClause_in_body18598);
							sortByClause1042=sortByClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_sortByClause.add(sortByClause1042.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2802:4: ( limitClause )?
					int alt339=2;
					int LA339_0 = input.LA(1);
					if ( (LA339_0==KW_LIMIT) ) {
						alt339=1;
					}
					switch (alt339) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2802:4: limitClause
							{
							pushFollow(FOLLOW_limitClause_in_body18604);
							limitClause1043=limitClause();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_limitClause.add(limitClause1043.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: whereClause, groupByClause, window_clause, limitClause, orderByClause, clusterByClause, selectClause, lateralView, havingClause, distributeByClause, sortByClause
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2802:17: -> ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2802:20: ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_INSERT, "TOK_INSERT"), root_1);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2802:33: ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) )
						{
						ASTNode root_2 = (ASTNode)adaptor.nil();
						root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DESTINATION, "TOK_DESTINATION"), root_2);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2802:51: ^( TOK_DIR TOK_TMP_FILE )
						{
						ASTNode root_3 = (ASTNode)adaptor.nil();
						root_3 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DIR, "TOK_DIR"), root_3);
						adaptor.addChild(root_3, (ASTNode)adaptor.create(TOK_TMP_FILE, "TOK_TMP_FILE"));
						adaptor.addChild(root_2, root_3);
						}

						adaptor.addChild(root_1, root_2);
						}

						adaptor.addChild(root_1, stream_selectClause.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2803:35: ( lateralView )?
						if ( stream_lateralView.hasNext() ) {
							adaptor.addChild(root_1, stream_lateralView.nextTree());
						}
						stream_lateralView.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2803:48: ( whereClause )?
						if ( stream_whereClause.hasNext() ) {
							adaptor.addChild(root_1, stream_whereClause.nextTree());
						}
						stream_whereClause.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2803:61: ( groupByClause )?
						if ( stream_groupByClause.hasNext() ) {
							adaptor.addChild(root_1, stream_groupByClause.nextTree());
						}
						stream_groupByClause.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2803:76: ( havingClause )?
						if ( stream_havingClause.hasNext() ) {
							adaptor.addChild(root_1, stream_havingClause.nextTree());
						}
						stream_havingClause.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2803:90: ( orderByClause )?
						if ( stream_orderByClause.hasNext() ) {
							adaptor.addChild(root_1, stream_orderByClause.nextTree());
						}
						stream_orderByClause.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2803:105: ( clusterByClause )?
						if ( stream_clusterByClause.hasNext() ) {
							adaptor.addChild(root_1, stream_clusterByClause.nextTree());
						}
						stream_clusterByClause.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2804:22: ( distributeByClause )?
						if ( stream_distributeByClause.hasNext() ) {
							adaptor.addChild(root_1, stream_distributeByClause.nextTree());
						}
						stream_distributeByClause.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2804:42: ( sortByClause )?
						if ( stream_sortByClause.hasNext() ) {
							adaptor.addChild(root_1, stream_sortByClause.nextTree());
						}
						stream_sortByClause.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2804:56: ( window_clause )?
						if ( stream_window_clause.hasNext() ) {
							adaptor.addChild(root_1, stream_window_clause.nextTree());
						}
						stream_window_clause.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2804:71: ( limitClause )?
						if ( stream_limitClause.hasNext() ) {
							adaptor.addChild(root_1, stream_limitClause.nextTree());
						}
						stream_limitClause.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// runs unconditionally before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "body"
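	// Illustrative note (added for clarity, not ANTLR output): when a query body has no
	// explicit INSERT clause, the rewrite above targets a temporary directory. For a
	// hypothetical input such as
	//   SELECT key FROM src WHERE key > 0 LIMIT 10
	// the resulting subtree has the shape
	//   ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) )
	//      selectClause whereClause limitClause )
	// with each optional clause appearing only if it was present in the input.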


	public static class insertClause_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "insertClause"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2807:1: insertClause : ( KW_INSERT KW_OVERWRITE destination ( ifNotExists )? -> ^( TOK_DESTINATION destination ( ifNotExists )? ) | KW_INSERT KW_INTO ( KW_TABLE )? tableOrPartition ( LPAREN targetCols= columnNameList RPAREN )? -> ^( TOK_INSERT_INTO tableOrPartition ( $targetCols)? ) );
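	// Illustrative examples (added for clarity, not ANTLR output; table and column
	// names are placeholders):
	//   INSERT OVERWRITE TABLE dst        -> ^( TOK_DESTINATION <destination> )
	//   INSERT INTO TABLE dst (c1, c2)    -> ^( TOK_INSERT_INTO <tableOrPartition> <targetCols> )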
	public final HiveParser.insertClause_return insertClause() throws RecognitionException {
		HiveParser.insertClause_return retval = new HiveParser.insertClause_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_INSERT1044=null;
		Token KW_OVERWRITE1045=null;
		Token KW_INSERT1048=null;
		Token KW_INTO1049=null;
		Token KW_TABLE1050=null;
		Token LPAREN1052=null;
		Token RPAREN1053=null;
		ParserRuleReturnScope targetCols =null;
		ParserRuleReturnScope destination1046 =null;
		ParserRuleReturnScope ifNotExists1047 =null;
		ParserRuleReturnScope tableOrPartition1051 =null;

		ASTNode KW_INSERT1044_tree=null;
		ASTNode KW_OVERWRITE1045_tree=null;
		ASTNode KW_INSERT1048_tree=null;
		ASTNode KW_INTO1049_tree=null;
		ASTNode KW_TABLE1050_tree=null;
		ASTNode LPAREN1052_tree=null;
		ASTNode RPAREN1053_tree=null;
		RewriteRuleTokenStream stream_KW_INTO=new RewriteRuleTokenStream(adaptor,"token KW_INTO");
		RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
		RewriteRuleTokenStream stream_KW_INSERT=new RewriteRuleTokenStream(adaptor,"token KW_INSERT");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
		RewriteRuleTokenStream stream_KW_OVERWRITE=new RewriteRuleTokenStream(adaptor,"token KW_OVERWRITE");
		RewriteRuleSubtreeStream stream_destination=new RewriteRuleSubtreeStream(adaptor,"rule destination");
		RewriteRuleSubtreeStream stream_ifNotExists=new RewriteRuleSubtreeStream(adaptor,"rule ifNotExists");
		RewriteRuleSubtreeStream stream_tableOrPartition=new RewriteRuleSubtreeStream(adaptor,"rule tableOrPartition");
		RewriteRuleSubtreeStream stream_columnNameList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameList");

		 pushMsg("insert clause", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2810:4: ( KW_INSERT KW_OVERWRITE destination ( ifNotExists )? -> ^( TOK_DESTINATION destination ( ifNotExists )? ) | KW_INSERT KW_INTO ( KW_TABLE )? tableOrPartition ( LPAREN targetCols= columnNameList RPAREN )? -> ^( TOK_INSERT_INTO tableOrPartition ( $targetCols)? ) )
			int alt344=2;
			int LA344_0 = input.LA(1);
			if ( (LA344_0==KW_INSERT) ) {
				int LA344_1 = input.LA(2);
				if ( (LA344_1==KW_OVERWRITE) ) {
					alt344=1;
				}
				else if ( (LA344_1==KW_INTO) ) {
					alt344=2;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 344, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 344, 0, input);
				throw nvae;
			}

			switch (alt344) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2811:6: KW_INSERT KW_OVERWRITE destination ( ifNotExists )?
					{
					KW_INSERT1044=(Token)match(input,KW_INSERT,FOLLOW_KW_INSERT_in_insertClause18725); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_INSERT.add(KW_INSERT1044);

					KW_OVERWRITE1045=(Token)match(input,KW_OVERWRITE,FOLLOW_KW_OVERWRITE_in_insertClause18727); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_OVERWRITE.add(KW_OVERWRITE1045);

					pushFollow(FOLLOW_destination_in_insertClause18729);
					destination1046=destination();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_destination.add(destination1046.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2811:41: ( ifNotExists )?
					int alt341=2;
					int LA341_0 = input.LA(1);
					if ( (LA341_0==KW_IF) ) {
						alt341=1;
					}
					switch (alt341) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2811:41: ifNotExists
							{
							pushFollow(FOLLOW_ifNotExists_in_insertClause18731);
							ifNotExists1047=ifNotExists();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_ifNotExists.add(ifNotExists1047.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: ifNotExists, destination
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2811:54: -> ^( TOK_DESTINATION destination ( ifNotExists )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2811:57: ^( TOK_DESTINATION destination ( ifNotExists )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DESTINATION, "TOK_DESTINATION"), root_1);
						adaptor.addChild(root_1, stream_destination.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2811:87: ( ifNotExists )?
						if ( stream_ifNotExists.hasNext() ) {
							adaptor.addChild(root_1, stream_ifNotExists.nextTree());
						}
						stream_ifNotExists.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2812:6: KW_INSERT KW_INTO ( KW_TABLE )? tableOrPartition ( LPAREN targetCols= columnNameList RPAREN )?
					{
					KW_INSERT1048=(Token)match(input,KW_INSERT,FOLLOW_KW_INSERT_in_insertClause18750); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_INSERT.add(KW_INSERT1048);

					KW_INTO1049=(Token)match(input,KW_INTO,FOLLOW_KW_INTO_in_insertClause18752); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_INTO.add(KW_INTO1049);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2812:24: ( KW_TABLE )?
					int alt342=2;
					int LA342_0 = input.LA(1);
					if ( (LA342_0==KW_TABLE) ) {
						alt342=1;
					}
					switch (alt342) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2812:24: KW_TABLE
							{
							KW_TABLE1050=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_insertClause18754); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE1050);

							}
							break;

					}

					pushFollow(FOLLOW_tableOrPartition_in_insertClause18757);
					tableOrPartition1051=tableOrPartition();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableOrPartition.add(tableOrPartition1051.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2812:51: ( LPAREN targetCols= columnNameList RPAREN )?
					int alt343=2;
					int LA343_0 = input.LA(1);
					if ( (LA343_0==LPAREN) ) {
						int LA343_1 = input.LA(2);
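					// Generated lookahead (noted for clarity): the long disjunction below accepts
					// Identifier plus every non-reserved keyword, i.e. the tokens that may begin
					// a columnNameList after the opening LPAREN.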
						if ( (LA343_1==Identifier||(LA343_1 >= KW_ABORT && LA343_1 <= KW_AFTER)||LA343_1==KW_ALLOC_FRACTION||LA343_1==KW_ANALYZE||LA343_1==KW_ARCHIVE||LA343_1==KW_ASC||(LA343_1 >= KW_AUTOCOMMIT && LA343_1 <= KW_BEFORE)||(LA343_1 >= KW_BUCKET && LA343_1 <= KW_BUCKETS)||(LA343_1 >= KW_CACHE && LA343_1 <= KW_CASCADE)||LA343_1==KW_CHANGE||(LA343_1 >= KW_CHECK && LA343_1 <= KW_COLLECTION)||(LA343_1 >= KW_COLUMNS && LA343_1 <= KW_COMMENT)||(LA343_1 >= KW_COMPACT && LA343_1 <= KW_CONCATENATE)||LA343_1==KW_CONTINUE||LA343_1==KW_DATA||LA343_1==KW_DATABASES||(LA343_1 >= KW_DATETIME && LA343_1 <= KW_DBPROPERTIES)||(LA343_1 >= KW_DEFAULT && LA343_1 <= KW_DEFINED)||(LA343_1 >= KW_DELIMITED && LA343_1 <= KW_DESC)||(LA343_1 >= KW_DETAIL && LA343_1 <= KW_DISABLE)||(LA343_1 >= KW_DISTRIBUTE && LA343_1 <= KW_DO)||LA343_1==KW_DOW||(LA343_1 >= KW_DUMP && LA343_1 <= KW_ELEM_TYPE)||LA343_1==KW_ENABLE||(LA343_1 >= KW_ENFORCED && LA343_1 <= KW_ESCAPED)||LA343_1==KW_EXCLUSIVE||(LA343_1 >= KW_EXPLAIN && LA343_1 <= KW_EXPRESSION)||(LA343_1 >= KW_FIELDS && LA343_1 <= KW_FIRST)||(LA343_1 >= KW_FORMAT && LA343_1 <= KW_FORMATTED)||LA343_1==KW_FUNCTIONS||(LA343_1 >= KW_HOUR && LA343_1 <= KW_IDXPROPERTIES)||(LA343_1 >= KW_INDEX && LA343_1 <= KW_INDEXES)||(LA343_1 >= KW_INPATH && LA343_1 <= KW_INPUTFORMAT)||(LA343_1 >= KW_ISOLATION && LA343_1 <= KW_JAR)||(LA343_1 >= KW_KEY && LA343_1 <= KW_LAST)||LA343_1==KW_LEVEL||(LA343_1 >= KW_LIMIT && LA343_1 <= KW_LOAD)||(LA343_1 >= KW_LOCATION && LA343_1 <= KW_LONG)||LA343_1==KW_MANAGEMENT||(LA343_1 >= KW_MAPJOIN && LA343_1 <= KW_MATERIALIZED)||LA343_1==KW_METADATA||(LA343_1 >= KW_MINUTE && LA343_1 <= KW_MONTH)||(LA343_1 >= KW_MOVE && LA343_1 <= KW_MSCK)||(LA343_1 >= KW_NORELY && LA343_1 <= KW_NOSCAN)||LA343_1==KW_NOVALIDATE||LA343_1==KW_NULLS||LA343_1==KW_OFFSET||(LA343_1 >= KW_OPERATOR && LA343_1 <= KW_OPTION)||(LA343_1 >= KW_OUTPUTDRIVER && LA343_1 <= KW_OUTPUTFORMAT)||(LA343_1 >= KW_OVERWRITE && LA343_1 <= KW_OWNER)||(LA343_1 >= KW_PARTITIONED && LA343_1 <= KW_PATH)||(LA343_1 >= KW_PLAN && LA343_1 <= KW_POOL)||LA343_1==KW_PRINCIPALS||(LA343_1 >= KW_PURGE && LA343_1 <= KW_QUERY_PARALLELISM)||LA343_1==KW_READ||(LA343_1 >= KW_REBUILD && LA343_1 <= KW_RECORDWRITER)||(LA343_1 >= KW_RELOAD && LA343_1 <= KW_RESTRICT)||LA343_1==KW_REWRITE||(LA343_1 >= KW_ROLE && LA343_1 <= KW_ROLES)||(LA343_1 >= KW_SCHEDULING_POLICY && LA343_1 <= KW_SECOND)||(LA343_1 >= KW_SEMI && LA343_1 <= KW_SERVER)||(LA343_1 >= KW_SETS && LA343_1 <= KW_SKEWED)||(LA343_1 >= KW_SNAPSHOT && LA343_1 <= KW_SSL)||(LA343_1 >= KW_STATISTICS && LA343_1 <= KW_SUMMARY)||LA343_1==KW_TABLES||(LA343_1 >= KW_TBLPROPERTIES && LA343_1 <= KW_TERMINATED)||LA343_1==KW_TINYINT||(LA343_1 >= KW_TOUCH && LA343_1 <= KW_TRANSACTIONS)||LA343_1==KW_UNARCHIVE||LA343_1==KW_UNDO||LA343_1==KW_UNIONTYPE||(LA343_1 >= KW_UNLOCK && LA343_1 <= KW_UNSIGNED)||(LA343_1 >= KW_URI && LA343_1 <= KW_USE)||(LA343_1 >= KW_UTC && LA343_1 <= KW_VALIDATE)||LA343_1==KW_VALUE_TYPE||(LA343_1 >= KW_VECTORIZATION && LA343_1 <= KW_WEEK)||LA343_1==KW_WHILE||(LA343_1 >= KW_WORK && LA343_1 <= KW_ZONE)||LA343_1==KW_BATCH||LA343_1==KW_DAYOFWEEK||LA343_1==KW_HOLD_DDLTIME||LA343_1==KW_IGNORE||LA343_1==KW_NO_DROP||LA343_1==KW_OFFLINE||LA343_1==KW_PROTECTION||LA343_1==KW_READONLY||LA343_1==KW_TIMESTAMPTZ) ) {
							alt343=1;
						}
					}
					switch (alt343) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2812:52: LPAREN targetCols= columnNameList RPAREN
							{
							LPAREN1052=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_insertClause18760); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN1052);

							pushFollow(FOLLOW_columnNameList_in_insertClause18764);
							targetCols=columnNameList();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_columnNameList.add(targetCols.getTree());
							RPAREN1053=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_insertClause18766); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN1053);

							}
							break;

					}

					// AST REWRITE
					// elements: targetCols, tableOrPartition
					// token labels: 
					// rule labels: targetCols, retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_targetCols=new RewriteRuleSubtreeStream(adaptor,"rule targetCols",targetCols!=null?targetCols.getTree():null);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2813:8: -> ^( TOK_INSERT_INTO tableOrPartition ( $targetCols)? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2813:11: ^( TOK_INSERT_INTO tableOrPartition ( $targetCols)? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_INSERT_INTO, "TOK_INSERT_INTO"), root_1);
						adaptor.addChild(root_1, stream_tableOrPartition.nextTree());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2813:47: ( $targetCols)?
						if ( stream_targetCols.hasNext() ) {
							adaptor.addChild(root_1, stream_targetCols.nextTree());
						}
						stream_targetCols.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// runs unconditionally before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "insertClause"


	public static class destination_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "destination"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2816:1: destination : ( (local= KW_LOCAL )? KW_DIRECTORY StringLiteral ( tableRowFormat )? ( tableFileFormat )? -> ^( TOK_DIR StringLiteral ( $local)? ( tableRowFormat )? ( tableFileFormat )? ) | KW_TABLE tableOrPartition -> tableOrPartition );
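	// Illustrative examples (added for clarity, not ANTLR output; the path and table
	// name are placeholders):
	//   LOCAL DIRECTORY '/tmp/out'   -> ^( TOK_DIR '/tmp/out' KW_LOCAL )
	//   TABLE dst                    -> the tableOrPartition subtree, passed through unchanged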
	public final HiveParser.destination_return destination() throws RecognitionException {
		HiveParser.destination_return retval = new HiveParser.destination_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token local=null;
		Token KW_DIRECTORY1054=null;
		Token StringLiteral1055=null;
		Token KW_TABLE1058=null;
		ParserRuleReturnScope tableRowFormat1056 =null;
		ParserRuleReturnScope tableFileFormat1057 =null;
		ParserRuleReturnScope tableOrPartition1059 =null;

		ASTNode local_tree=null;
		ASTNode KW_DIRECTORY1054_tree=null;
		ASTNode StringLiteral1055_tree=null;
		ASTNode KW_TABLE1058_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_DIRECTORY=new RewriteRuleTokenStream(adaptor,"token KW_DIRECTORY");
		RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
		RewriteRuleTokenStream stream_KW_LOCAL=new RewriteRuleTokenStream(adaptor,"token KW_LOCAL");
		RewriteRuleSubtreeStream stream_tableRowFormat=new RewriteRuleSubtreeStream(adaptor,"rule tableRowFormat");
		RewriteRuleSubtreeStream stream_tableFileFormat=new RewriteRuleSubtreeStream(adaptor,"rule tableFileFormat");
		RewriteRuleSubtreeStream stream_tableOrPartition=new RewriteRuleSubtreeStream(adaptor,"rule tableOrPartition");

		 pushMsg("destination specification", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2819:4: ( (local= KW_LOCAL )? KW_DIRECTORY StringLiteral ( tableRowFormat )? ( tableFileFormat )? -> ^( TOK_DIR StringLiteral ( $local)? ( tableRowFormat )? ( tableFileFormat )? ) | KW_TABLE tableOrPartition -> tableOrPartition )
			int alt348=2;
			int LA348_0 = input.LA(1);
			if ( (LA348_0==KW_DIRECTORY||LA348_0==KW_LOCAL) ) {
				alt348=1;
			}
			else if ( (LA348_0==KW_TABLE) ) {
				alt348=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 348, 0, input);
				throw nvae;
			}

			switch (alt348) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2820:6: (local= KW_LOCAL )? KW_DIRECTORY StringLiteral ( tableRowFormat )? ( tableFileFormat )?
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2820:6: (local= KW_LOCAL )?
					int alt345=2;
					int LA345_0 = input.LA(1);
					if ( (LA345_0==KW_LOCAL) ) {
						alt345=1;
					}
					switch (alt345) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2820:7: local= KW_LOCAL
							{
							local=(Token)match(input,KW_LOCAL,FOLLOW_KW_LOCAL_in_destination18822); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_LOCAL.add(local);

							}
							break;

					}

					KW_DIRECTORY1054=(Token)match(input,KW_DIRECTORY,FOLLOW_KW_DIRECTORY_in_destination18826); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DIRECTORY.add(KW_DIRECTORY1054);

					StringLiteral1055=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_destination18828); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(StringLiteral1055);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2820:53: ( tableRowFormat )?
					int alt346=2;
					int LA346_0 = input.LA(1);
					if ( (LA346_0==KW_ROW) ) {
						alt346=1;
					}
					switch (alt346) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2820:53: tableRowFormat
							{
							pushFollow(FOLLOW_tableRowFormat_in_destination18830);
							tableRowFormat1056=tableRowFormat();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tableRowFormat.add(tableRowFormat1056.getTree());
							}
							break;

					}

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2820:69: ( tableFileFormat )?
					int alt347=2;
					int LA347_0 = input.LA(1);
					if ( (LA347_0==KW_STORED) ) {
						alt347=1;
					}
					switch (alt347) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2820:69: tableFileFormat
							{
							pushFollow(FOLLOW_tableFileFormat_in_destination18833);
							tableFileFormat1057=tableFileFormat();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_tableFileFormat.add(tableFileFormat1057.getTree());
							}
							break;

					}

					// AST REWRITE
					// elements: local, tableFileFormat, StringLiteral, tableRowFormat
					// token labels: local
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleTokenStream stream_local=new RewriteRuleTokenStream(adaptor,"token local",local);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2821:8: -> ^( TOK_DIR StringLiteral ( $local)? ( tableRowFormat )? ( tableFileFormat )? )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2821:11: ^( TOK_DIR StringLiteral ( $local)? ( tableRowFormat )? ( tableFileFormat )? )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DIR, "TOK_DIR"), root_1);
						adaptor.addChild(root_1, stream_StringLiteral.nextNode());
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2821:36: ( $local)?
						if ( stream_local.hasNext() ) {
							adaptor.addChild(root_1, stream_local.nextNode());
						}
						stream_local.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2821:43: ( tableRowFormat )?
						if ( stream_tableRowFormat.hasNext() ) {
							adaptor.addChild(root_1, stream_tableRowFormat.nextTree());
						}
						stream_tableRowFormat.reset();

						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2821:59: ( tableFileFormat )?
						if ( stream_tableFileFormat.hasNext() ) {
							adaptor.addChild(root_1, stream_tableFileFormat.nextTree());
						}
						stream_tableFileFormat.reset();

						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2822:6: KW_TABLE tableOrPartition
					{
					KW_TABLE1058=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_destination18866); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_TABLE.add(KW_TABLE1058);

					pushFollow(FOLLOW_tableOrPartition_in_destination18868);
					tableOrPartition1059=tableOrPartition();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_tableOrPartition.add(tableOrPartition1059.getTree());
					// AST REWRITE
					// elements: tableOrPartition
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2822:32: -> tableOrPartition
					{
						adaptor.addChild(root_0, stream_tableOrPartition.nextTree());
					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// runs unconditionally before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "destination"


	public static class limitClause_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "limitClause"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2825:1: limitClause : ( KW_LIMIT ( (offset= Number COMMA )? num= Number ) -> ^( TOK_LIMIT ( $offset)? $num) | KW_LIMIT num= Number KW_OFFSET offset= Number -> ^( TOK_LIMIT ( $offset)? $num) );
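	// Illustrative examples (added for clarity, not ANTLR output): both alternatives
	// normalize to the same tree, with the offset child first when present:
	//   LIMIT 10            -> ^( TOK_LIMIT 10 )
	//   LIMIT 5, 10         -> ^( TOK_LIMIT 5 10 )
	//   LIMIT 10 OFFSET 5   -> ^( TOK_LIMIT 5 10 )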
	public final HiveParser.limitClause_return limitClause() throws RecognitionException {
		HiveParser.limitClause_return retval = new HiveParser.limitClause_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token offset=null;
		Token num=null;
		Token KW_LIMIT1060=null;
		Token COMMA1061=null;
		Token KW_LIMIT1062=null;
		Token KW_OFFSET1063=null;

		ASTNode offset_tree=null;
		ASTNode num_tree=null;
		ASTNode KW_LIMIT1060_tree=null;
		ASTNode COMMA1061_tree=null;
		ASTNode KW_LIMIT1062_tree=null;
		ASTNode KW_OFFSET1063_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleTokenStream stream_Number=new RewriteRuleTokenStream(adaptor,"token Number");
		RewriteRuleTokenStream stream_KW_LIMIT=new RewriteRuleTokenStream(adaptor,"token KW_LIMIT");
		RewriteRuleTokenStream stream_KW_OFFSET=new RewriteRuleTokenStream(adaptor,"token KW_OFFSET");

		 pushMsg("limit clause", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2828:4: ( KW_LIMIT ( (offset= Number COMMA )? num= Number ) -> ^( TOK_LIMIT ( $offset)? $num) | KW_LIMIT num= Number KW_OFFSET offset= Number -> ^( TOK_LIMIT ( $offset)? $num) )
			int alt350=2;
			int LA350_0 = input.LA(1);
			if ( (LA350_0==KW_LIMIT) ) {
				int LA350_1 = input.LA(2);
				if ( (LA350_1==Number) ) {
					int LA350_2 = input.LA(3);
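					// Third-token lookahead (noted for clarity): KW_OFFSET selects the
					// "LIMIT n OFFSET m" form; any token that can legally follow a limitClause
					// selects the "LIMIT [m,] n" form.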
					if ( (LA350_2==KW_OFFSET) ) {
						alt350=2;
					}
					else if ( (LA350_2==EOF||LA350_2==COMMA||LA350_2==KW_EXCEPT||LA350_2==KW_INSERT||LA350_2==KW_INTERSECT||LA350_2==KW_MAP||LA350_2==KW_MINUS||LA350_2==KW_REDUCE||LA350_2==KW_SELECT||LA350_2==KW_UNION||LA350_2==RPAREN) ) {
						alt350=1;
					}

					else {
						if (state.backtracking>0) {state.failed=true; return retval;}
						int nvaeMark = input.mark();
						try {
							for (int nvaeConsume = 0; nvaeConsume < 3 - 1; nvaeConsume++) {
								input.consume();
							}
							NoViableAltException nvae =
								new NoViableAltException("", 350, 2, input);
							throw nvae;
						} finally {
							input.rewind(nvaeMark);
						}
					}

				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 350, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 350, 0, input);
				throw nvae;
			}

			switch (alt350) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2829:4: KW_LIMIT ( (offset= Number COMMA )? num= Number )
					{
					KW_LIMIT1060=(Token)match(input,KW_LIMIT,FOLLOW_KW_LIMIT_in_limitClause18900); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_LIMIT.add(KW_LIMIT1060);

					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2829:13: ( (offset= Number COMMA )? num= Number )
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2829:14: (offset= Number COMMA )? num= Number
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2829:14: (offset= Number COMMA )?
					int alt349=2;
					int LA349_0 = input.LA(1);
					if ( (LA349_0==Number) ) {
						int LA349_1 = input.LA(2);
						if ( (LA349_1==COMMA) ) {
							alt349=1;
						}
					}
					switch (alt349) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2829:15: offset= Number COMMA
							{
							offset=(Token)match(input,Number,FOLLOW_Number_in_limitClause18906); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_Number.add(offset);

							COMMA1061=(Token)match(input,COMMA,FOLLOW_COMMA_in_limitClause18908); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_COMMA.add(COMMA1061);

							}
							break;

					}

					num=(Token)match(input,Number,FOLLOW_Number_in_limitClause18914); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_Number.add(num);

					}

					// AST REWRITE
					// elements: offset, num
					// token labels: offset, num
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleTokenStream stream_offset=new RewriteRuleTokenStream(adaptor,"token offset",offset);
					RewriteRuleTokenStream stream_num=new RewriteRuleTokenStream(adaptor,"token num",num);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2829:49: -> ^( TOK_LIMIT ( $offset)? $num)
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2829:52: ^( TOK_LIMIT ( $offset)? $num)
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_LIMIT, "TOK_LIMIT"), root_1);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2829:64: ( $offset)?
						if ( stream_offset.hasNext() ) {
							adaptor.addChild(root_1, stream_offset.nextNode());
						}
						stream_offset.reset();

						adaptor.addChild(root_1, stream_num.nextNode());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2830:6: KW_LIMIT num= Number KW_OFFSET offset= Number
					{
					KW_LIMIT1062=(Token)match(input,KW_LIMIT,FOLLOW_KW_LIMIT_in_limitClause18937); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_LIMIT.add(KW_LIMIT1062);

					num=(Token)match(input,Number,FOLLOW_Number_in_limitClause18941); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_Number.add(num);

					KW_OFFSET1063=(Token)match(input,KW_OFFSET,FOLLOW_KW_OFFSET_in_limitClause18943); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_OFFSET.add(KW_OFFSET1063);

					offset=(Token)match(input,Number,FOLLOW_Number_in_limitClause18947); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_Number.add(offset);

					// AST REWRITE
					// elements: num, offset
					// token labels: offset, num
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleTokenStream stream_offset=new RewriteRuleTokenStream(adaptor,"token offset",offset);
					RewriteRuleTokenStream stream_num=new RewriteRuleTokenStream(adaptor,"token num",num);
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2830:50: -> ^( TOK_LIMIT ( $offset)? $num)
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2830:53: ^( TOK_LIMIT ( $offset)? $num)
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_LIMIT, "TOK_LIMIT"), root_1);
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2830:65: ( $offset)?
						if ( stream_offset.hasNext() ) {
							adaptor.addChild(root_1, stream_offset.nextNode());
						}
						stream_offset.reset();

						adaptor.addChild(root_1, stream_num.nextNode());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// runs unconditionally before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "limitClause"


	public static class deleteStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "deleteStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2834:1: deleteStatement : KW_DELETE KW_FROM tableName ( whereClause )? -> ^( TOK_DELETE_FROM tableName ( whereClause )? ) ;
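	// Illustrative example (added for clarity, not ANTLR output; names are placeholders):
	//   DELETE FROM dst WHERE id = 1   -> ^( TOK_DELETE_FROM <tableName> <whereClause> )
	// with the whereClause child omitted when no WHERE is given.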
	public final HiveParser.deleteStatement_return deleteStatement() throws RecognitionException {
		HiveParser.deleteStatement_return retval = new HiveParser.deleteStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_DELETE1064=null;
		Token KW_FROM1065=null;
		ParserRuleReturnScope tableName1066 =null;
		ParserRuleReturnScope whereClause1067 =null;

		ASTNode KW_DELETE1064_tree=null;
		ASTNode KW_FROM1065_tree=null;
		RewriteRuleTokenStream stream_KW_DELETE=new RewriteRuleTokenStream(adaptor,"token KW_DELETE");
		RewriteRuleTokenStream stream_KW_FROM=new RewriteRuleTokenStream(adaptor,"token KW_FROM");
		RewriteRuleSubtreeStream stream_whereClause=new RewriteRuleSubtreeStream(adaptor,"rule whereClause");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		 pushMsg("delete statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2837:4: ( KW_DELETE KW_FROM tableName ( whereClause )? -> ^( TOK_DELETE_FROM tableName ( whereClause )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2838:4: KW_DELETE KW_FROM tableName ( whereClause )?
			{
			KW_DELETE1064=(Token)match(input,KW_DELETE,FOLLOW_KW_DELETE_in_deleteStatement18991); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_DELETE.add(KW_DELETE1064);

			KW_FROM1065=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_deleteStatement18993); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_FROM.add(KW_FROM1065);

			pushFollow(FOLLOW_tableName_in_deleteStatement18995);
			tableName1066=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(tableName1066.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2838:32: ( whereClause )?
			int alt351=2;
			int LA351_0 = input.LA(1);
			if ( (LA351_0==KW_WHERE) ) {
				alt351=1;
			}
			switch (alt351) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2838:33: whereClause
					{
					pushFollow(FOLLOW_whereClause_in_deleteStatement18998);
					whereClause1067=whereClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_whereClause.add(whereClause1067.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: tableName, whereClause
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2838:47: -> ^( TOK_DELETE_FROM tableName ( whereClause )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2838:50: ^( TOK_DELETE_FROM tableName ( whereClause )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_DELETE_FROM, "TOK_DELETE_FROM"), root_1);
				adaptor.addChild(root_1, stream_tableName.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2838:78: ( whereClause )?
				if ( stream_whereClause.hasNext() ) {
					adaptor.addChild(root_1, stream_whereClause.nextTree());
				}
				stream_whereClause.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// runs unconditionally before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "deleteStatement"


	public static class columnAssignmentClause_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "columnAssignmentClause"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2842:1: columnAssignmentClause : tableOrColumn EQUAL ^ precedencePlusExpression ;
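	// Illustrative example (added for clarity, not ANTLR output): the inline ^ operator
	// promotes EQUAL to the subtree root, so a placeholder assignment such as
	//   c1 = c2 + 1   parses to   ^( = c1 ( + c2 1 ) )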
	public final HiveParser.columnAssignmentClause_return columnAssignmentClause() throws RecognitionException {
		HiveParser.columnAssignmentClause_return retval = new HiveParser.columnAssignmentClause_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token EQUAL1069=null;
		ParserRuleReturnScope tableOrColumn1068 =null;
		ParserRuleReturnScope precedencePlusExpression1070 =null;

		ASTNode EQUAL1069_tree=null;

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2843:4: ( tableOrColumn EQUAL ^ precedencePlusExpression )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2844:4: tableOrColumn EQUAL ^ precedencePlusExpression
			{
			root_0 = (ASTNode)adaptor.nil();


			pushFollow(FOLLOW_tableOrColumn_in_columnAssignmentClause19031);
			tableOrColumn1068=tableOrColumn();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) adaptor.addChild(root_0, tableOrColumn1068.getTree());

			EQUAL1069=(Token)match(input,EQUAL,FOLLOW_EQUAL_in_columnAssignmentClause19033); if (state.failed) return retval;
			if ( state.backtracking==0 ) {
			EQUAL1069_tree = (ASTNode)adaptor.create(EQUAL1069);
			root_0 = (ASTNode)adaptor.becomeRoot(EQUAL1069_tree, root_0);
			}

			pushFollow(FOLLOW_precedencePlusExpression_in_columnAssignmentClause19036);
			precedencePlusExpression1070=precedencePlusExpression();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) adaptor.addChild(root_0, precedencePlusExpression1070.getTree());

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// runs unconditionally before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "columnAssignmentClause"


	public static class setColumnsClause_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "setColumnsClause"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2848:1: setColumnsClause : KW_SET columnAssignmentClause ( COMMA columnAssignmentClause )* -> ^( TOK_SET_COLUMNS_CLAUSE ( columnAssignmentClause )* ) ;
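	// Illustrative example (added for clarity, not ANTLR output; names are placeholders):
	//   SET c1 = 1, c2 = c2 + 1
	//   -> ^( TOK_SET_COLUMNS_CLAUSE ( = c1 1 ) ( = c2 ( + c2 1 ) ) )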
	public final HiveParser.setColumnsClause_return setColumnsClause() throws RecognitionException {
		HiveParser.setColumnsClause_return retval = new HiveParser.setColumnsClause_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SET1071=null;
		Token COMMA1073=null;
		ParserRuleReturnScope columnAssignmentClause1072 =null;
		ParserRuleReturnScope columnAssignmentClause1074 =null;

		ASTNode KW_SET1071_tree=null;
		ASTNode COMMA1073_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
		RewriteRuleSubtreeStream stream_columnAssignmentClause=new RewriteRuleSubtreeStream(adaptor,"rule columnAssignmentClause");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2849:4: ( KW_SET columnAssignmentClause ( COMMA columnAssignmentClause )* -> ^( TOK_SET_COLUMNS_CLAUSE ( columnAssignmentClause )* ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2850:4: KW_SET columnAssignmentClause ( COMMA columnAssignmentClause )*
			{
			KW_SET1071=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_setColumnsClause19056); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET1071);

			pushFollow(FOLLOW_columnAssignmentClause_in_setColumnsClause19058);
			columnAssignmentClause1072=columnAssignmentClause();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_columnAssignmentClause.add(columnAssignmentClause1072.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2850:34: ( COMMA columnAssignmentClause )*
			loop352:
			while (true) {
				int alt352=2;
				int LA352_0 = input.LA(1);
				if ( (LA352_0==COMMA) ) {
					alt352=1;
				}

				switch (alt352) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2850:35: COMMA columnAssignmentClause
					{
					COMMA1073=(Token)match(input,COMMA,FOLLOW_COMMA_in_setColumnsClause19061); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_COMMA.add(COMMA1073);

					pushFollow(FOLLOW_columnAssignmentClause_in_setColumnsClause19063);
					columnAssignmentClause1074=columnAssignmentClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_columnAssignmentClause.add(columnAssignmentClause1074.getTree());
					}
					break;

				default :
					break loop352;
				}
			}

			// AST REWRITE
			// elements: columnAssignmentClause
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2850:66: -> ^( TOK_SET_COLUMNS_CLAUSE ( columnAssignmentClause )* )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2850:69: ^( TOK_SET_COLUMNS_CLAUSE ( columnAssignmentClause )* )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SET_COLUMNS_CLAUSE, "TOK_SET_COLUMNS_CLAUSE"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2850:94: ( columnAssignmentClause )*
				while ( stream_columnAssignmentClause.hasNext() ) {
					adaptor.addChild(root_1, stream_columnAssignmentClause.nextTree());
				}
				stream_columnAssignmentClause.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// runs unconditionally before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "setColumnsClause"


	public static class updateStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "updateStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2857:1: updateStatement : KW_UPDATE tableName setColumnsClause ( whereClause )? -> ^( TOK_UPDATE_TABLE tableName setColumnsClause ( whereClause )? ) ;
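	// Illustrative example (added for clarity, not ANTLR output; names are placeholders):
	//   UPDATE dst SET c1 = 1 WHERE id = 2
	//   -> ^( TOK_UPDATE_TABLE <tableName> <setColumnsClause> <whereClause> )
	// with the whereClause child omitted when no WHERE is given.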
	public final HiveParser.updateStatement_return updateStatement() throws RecognitionException {
		HiveParser.updateStatement_return retval = new HiveParser.updateStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_UPDATE1075=null;
		ParserRuleReturnScope tableName1076 =null;
		ParserRuleReturnScope setColumnsClause1077 =null;
		ParserRuleReturnScope whereClause1078 =null;

		ASTNode KW_UPDATE1075_tree=null;
		RewriteRuleTokenStream stream_KW_UPDATE=new RewriteRuleTokenStream(adaptor,"token KW_UPDATE");
		RewriteRuleSubtreeStream stream_setColumnsClause=new RewriteRuleSubtreeStream(adaptor,"rule setColumnsClause");
		RewriteRuleSubtreeStream stream_whereClause=new RewriteRuleSubtreeStream(adaptor,"rule whereClause");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");

		 pushMsg("update statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2860:4: ( KW_UPDATE tableName setColumnsClause ( whereClause )? -> ^( TOK_UPDATE_TABLE tableName setColumnsClause ( whereClause )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2861:4: KW_UPDATE tableName setColumnsClause ( whereClause )?
			{
			KW_UPDATE1075=(Token)match(input,KW_UPDATE,FOLLOW_KW_UPDATE_in_updateStatement19105); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_UPDATE.add(KW_UPDATE1075);

			pushFollow(FOLLOW_tableName_in_updateStatement19107);
			tableName1076=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(tableName1076.getTree());
			pushFollow(FOLLOW_setColumnsClause_in_updateStatement19109);
			setColumnsClause1077=setColumnsClause();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_setColumnsClause.add(setColumnsClause1077.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2861:41: ( whereClause )?
			int alt353=2;
			int LA353_0 = input.LA(1);
			if ( (LA353_0==KW_WHERE) ) {
				alt353=1;
			}
			switch (alt353) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2861:41: whereClause
					{
					pushFollow(FOLLOW_whereClause_in_updateStatement19111);
					whereClause1078=whereClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_whereClause.add(whereClause1078.getTree());
					}
					break;

			}

			// AST REWRITE
			// elements: tableName, setColumnsClause, whereClause
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2861:54: -> ^( TOK_UPDATE_TABLE tableName setColumnsClause ( whereClause )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2861:57: ^( TOK_UPDATE_TABLE tableName setColumnsClause ( whereClause )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_UPDATE_TABLE, "TOK_UPDATE_TABLE"), root_1);
				adaptor.addChild(root_1, stream_tableName.nextTree());
				adaptor.addChild(root_1, stream_setColumnsClause.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2861:103: ( whereClause )?
				if ( stream_whereClause.hasNext() ) {
					adaptor.addChild(root_1, stream_whereClause.nextTree());
				}
				stream_whereClause.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// runs unconditionally before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "updateStatement"


	public static class sqlTransactionStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "sqlTransactionStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2868:1: sqlTransactionStatement : ( startTransactionStatement | commitStatement | rollbackStatement | setAutoCommitStatement );
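	// Illustrative note (added for clarity, not ANTLR output): the four alternatives are
	// dispatched purely on the first keyword, i.e. statements beginning with
	//   START ... / COMMIT ... / ROLLBACK ... / SET AUTOCOMMIT ...
	// The trailing syntax of each is defined by the delegated rule, not repeated here.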
	public final HiveParser.sqlTransactionStatement_return sqlTransactionStatement() throws RecognitionException {
		HiveParser.sqlTransactionStatement_return retval = new HiveParser.sqlTransactionStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope startTransactionStatement1079 =null;
		ParserRuleReturnScope commitStatement1080 =null;
		ParserRuleReturnScope rollbackStatement1081 =null;
		ParserRuleReturnScope setAutoCommitStatement1082 =null;


		 pushMsg("transaction statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2871:3: ( startTransactionStatement | commitStatement | rollbackStatement | setAutoCommitStatement )
			int alt354=4;
			switch ( input.LA(1) ) {
			case KW_START:
				{
				alt354=1;
				}
				break;
			case KW_COMMIT:
				{
				alt354=2;
				}
				break;
			case KW_ROLLBACK:
				{
				alt354=3;
				}
				break;
			case KW_SET:
				{
				alt354=4;
				}
				break;
			default:
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 354, 0, input);
				throw nvae;
			}
			switch (alt354) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2872:3: startTransactionStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_startTransactionStatement_in_sqlTransactionStatement19153);
					startTransactionStatement1079=startTransactionStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, startTransactionStatement1079.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2873:4: commitStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_commitStatement_in_sqlTransactionStatement19158);
					commitStatement1080=commitStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, commitStatement1080.getTree());

					}
					break;
				case 3 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2874:4: rollbackStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_rollbackStatement_in_sqlTransactionStatement19163);
					rollbackStatement1081=rollbackStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, rollbackStatement1081.getTree());

					}
					break;
				case 4 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2875:4: setAutoCommitStatement
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_setAutoCommitStatement_in_sqlTransactionStatement19168);
					setAutoCommitStatement1082=setAutoCommitStatement();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, setAutoCommitStatement1082.getTree());

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// runs unconditionally before leaving the rule; no cleanup is needed here
		}
		return retval;
	}
	// $ANTLR end "sqlTransactionStatement"


	public static class startTransactionStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "startTransactionStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2878:1: startTransactionStatement : KW_START KW_TRANSACTION ( transactionMode ( COMMA transactionMode )* )? -> ^( TOK_START_TRANSACTION ( transactionMode )* ) ;
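	// Illustrative examples (added for clarity, not ANTLR output; the mode spellings
	// are assumptions based on the transactionMode rule defined elsewhere in this parser):
	//   START TRANSACTION             -> ^( TOK_START_TRANSACTION )
	//   START TRANSACTION READ ONLY   -> ^( TOK_START_TRANSACTION <transactionMode> )
	// with one child per comma-separated transaction mode.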
	public final HiveParser.startTransactionStatement_return startTransactionStatement() throws RecognitionException {
		HiveParser.startTransactionStatement_return retval = new HiveParser.startTransactionStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_START1083=null;
		Token KW_TRANSACTION1084=null;
		Token COMMA1086=null;
		ParserRuleReturnScope transactionMode1085 =null;
		ParserRuleReturnScope transactionMode1087 =null;

		ASTNode KW_START1083_tree=null;
		ASTNode KW_TRANSACTION1084_tree=null;
		ASTNode COMMA1086_tree=null;
		RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
		RewriteRuleTokenStream stream_KW_START=new RewriteRuleTokenStream(adaptor,"token KW_START");
		RewriteRuleTokenStream stream_KW_TRANSACTION=new RewriteRuleTokenStream(adaptor,"token KW_TRANSACTION");
		RewriteRuleSubtreeStream stream_transactionMode=new RewriteRuleSubtreeStream(adaptor,"rule transactionMode");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2879:3: ( KW_START KW_TRANSACTION ( transactionMode ( COMMA transactionMode )* )? -> ^( TOK_START_TRANSACTION ( transactionMode )* ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2880:3: KW_START KW_TRANSACTION ( transactionMode ( COMMA transactionMode )* )?
			{
			KW_START1083=(Token)match(input,KW_START,FOLLOW_KW_START_in_startTransactionStatement19182); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_START.add(KW_START1083);

			KW_TRANSACTION1084=(Token)match(input,KW_TRANSACTION,FOLLOW_KW_TRANSACTION_in_startTransactionStatement19184); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TRANSACTION.add(KW_TRANSACTION1084);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2880:27: ( transactionMode ( COMMA transactionMode )* )?
			int alt356=2;
			int LA356_0 = input.LA(1);
			if ( (LA356_0==KW_ISOLATION||LA356_0==KW_READ) ) {
				alt356=1;
			}
			switch (alt356) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2880:29: transactionMode ( COMMA transactionMode )*
					{
					pushFollow(FOLLOW_transactionMode_in_startTransactionStatement19188);
					transactionMode1085=transactionMode();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_transactionMode.add(transactionMode1085.getTree());
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2880:46: ( COMMA transactionMode )*
					loop355:
					while (true) {
						int alt355=2;
						int LA355_0 = input.LA(1);
						if ( (LA355_0==COMMA) ) {
							alt355=1;
						}

						switch (alt355) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2880:48: COMMA transactionMode
							{
							COMMA1086=(Token)match(input,COMMA,FOLLOW_COMMA_in_startTransactionStatement19193); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_COMMA.add(COMMA1086);

							pushFollow(FOLLOW_transactionMode_in_startTransactionStatement19195);
							transactionMode1087=transactionMode();
							state._fsp--;
							if (state.failed) return retval;
							if ( state.backtracking==0 ) stream_transactionMode.add(transactionMode1087.getTree());
							}
							break;

						default :
							break loop355;
						}
					}

					}
					break;

			}

			// AST REWRITE
			// elements: transactionMode
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2880:77: -> ^( TOK_START_TRANSACTION ( transactionMode )* )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2880:80: ^( TOK_START_TRANSACTION ( transactionMode )* )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_START_TRANSACTION, "TOK_START_TRANSACTION"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2880:104: ( transactionMode )*
				while ( stream_transactionMode.hasNext() ) {
					adaptor.addChild(root_1, stream_transactionMode.nextTree());
				}
				stream_transactionMode.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// generated cleanup hook: runs unconditionally, nothing to release here
		}
		return retval;
	}
	// $ANTLR end "startTransactionStatement"


	public static class transactionMode_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "transactionMode"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2883:1: transactionMode : ( isolationLevel | transactionAccessMode -> ^( TOK_TXN_ACCESS_MODE transactionAccessMode ) );
	public final HiveParser.transactionMode_return transactionMode() throws RecognitionException {
		HiveParser.transactionMode_return retval = new HiveParser.transactionMode_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope isolationLevel1088 =null;
		ParserRuleReturnScope transactionAccessMode1089 =null;

		RewriteRuleSubtreeStream stream_transactionAccessMode=new RewriteRuleSubtreeStream(adaptor,"rule transactionAccessMode");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2884:3: ( isolationLevel | transactionAccessMode -> ^( TOK_TXN_ACCESS_MODE transactionAccessMode ) )
			int alt357=2;
			int LA357_0 = input.LA(1);
			if ( (LA357_0==KW_ISOLATION) ) {
				alt357=1;
			}
			else if ( (LA357_0==KW_READ) ) {
				alt357=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 357, 0, input);
				throw nvae;
			}

			switch (alt357) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2885:3: isolationLevel
					{
					root_0 = (ASTNode)adaptor.nil();


					pushFollow(FOLLOW_isolationLevel_in_transactionMode19226);
					isolationLevel1088=isolationLevel();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, isolationLevel1088.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2886:5: transactionAccessMode
					{
					pushFollow(FOLLOW_transactionAccessMode_in_transactionMode19232);
					transactionAccessMode1089=transactionAccessMode();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_transactionAccessMode.add(transactionAccessMode1089.getTree());
					// AST REWRITE
					// elements: transactionAccessMode
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2886:27: -> ^( TOK_TXN_ACCESS_MODE transactionAccessMode )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2886:30: ^( TOK_TXN_ACCESS_MODE transactionAccessMode )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TXN_ACCESS_MODE, "TOK_TXN_ACCESS_MODE"), root_1);
						adaptor.addChild(root_1, stream_transactionAccessMode.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// generated cleanup hook: runs unconditionally, nothing to release here
		}
		return retval;
	}
	// $ANTLR end "transactionMode"


	public static class transactionAccessMode_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "transactionAccessMode"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2889:1: transactionAccessMode : ( KW_READ KW_ONLY -> TOK_TXN_READ_ONLY | KW_READ KW_WRITE -> TOK_TXN_READ_WRITE );
	public final HiveParser.transactionAccessMode_return transactionAccessMode() throws RecognitionException {
		HiveParser.transactionAccessMode_return retval = new HiveParser.transactionAccessMode_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_READ1090=null;
		Token KW_ONLY1091=null;
		Token KW_READ1092=null;
		Token KW_WRITE1093=null;

		ASTNode KW_READ1090_tree=null;
		ASTNode KW_ONLY1091_tree=null;
		ASTNode KW_READ1092_tree=null;
		ASTNode KW_WRITE1093_tree=null;
		RewriteRuleTokenStream stream_KW_READ=new RewriteRuleTokenStream(adaptor,"token KW_READ");
		RewriteRuleTokenStream stream_KW_ONLY=new RewriteRuleTokenStream(adaptor,"token KW_ONLY");
		RewriteRuleTokenStream stream_KW_WRITE=new RewriteRuleTokenStream(adaptor,"token KW_WRITE");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2890:3: ( KW_READ KW_ONLY -> TOK_TXN_READ_ONLY | KW_READ KW_WRITE -> TOK_TXN_READ_WRITE )
			int alt358=2;
			int LA358_0 = input.LA(1);
			if ( (LA358_0==KW_READ) ) {
				int LA358_1 = input.LA(2);
				if ( (LA358_1==KW_ONLY) ) {
					alt358=1;
				}
				else if ( (LA358_1==KW_WRITE) ) {
					alt358=2;
				}

				else {
					if (state.backtracking>0) {state.failed=true; return retval;}
					int nvaeMark = input.mark();
					try {
						input.consume();
						NoViableAltException nvae =
							new NoViableAltException("", 358, 1, input);
						throw nvae;
					} finally {
						input.rewind(nvaeMark);
					}
				}

			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 358, 0, input);
				throw nvae;
			}

			switch (alt358) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2891:3: KW_READ KW_ONLY
					{
					KW_READ1090=(Token)match(input,KW_READ,FOLLOW_KW_READ_in_transactionAccessMode19255); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_READ.add(KW_READ1090);

					KW_ONLY1091=(Token)match(input,KW_ONLY,FOLLOW_KW_ONLY_in_transactionAccessMode19257); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_ONLY.add(KW_ONLY1091);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2891:19: -> TOK_TXN_READ_ONLY
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_TXN_READ_ONLY, "TOK_TXN_READ_ONLY"));
					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2892:5: KW_READ KW_WRITE
					{
					KW_READ1092=(Token)match(input,KW_READ,FOLLOW_KW_READ_in_transactionAccessMode19267); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_READ.add(KW_READ1092);

					KW_WRITE1093=(Token)match(input,KW_WRITE,FOLLOW_KW_WRITE_in_transactionAccessMode19269); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_WRITE.add(KW_WRITE1093);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2892:22: -> TOK_TXN_READ_WRITE
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_TXN_READ_WRITE, "TOK_TXN_READ_WRITE"));
					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// generated cleanup hook: runs unconditionally, nothing to release here
		}
		return retval;
	}
	// $ANTLR end "transactionAccessMode"


	public static class isolationLevel_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "isolationLevel"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2895:1: isolationLevel : KW_ISOLATION KW_LEVEL levelOfIsolation -> ^( TOK_ISOLATION_LEVEL levelOfIsolation ) ;
	public final HiveParser.isolationLevel_return isolationLevel() throws RecognitionException {
		HiveParser.isolationLevel_return retval = new HiveParser.isolationLevel_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ISOLATION1094=null;
		Token KW_LEVEL1095=null;
		ParserRuleReturnScope levelOfIsolation1096 =null;

		ASTNode KW_ISOLATION1094_tree=null;
		ASTNode KW_LEVEL1095_tree=null;
		RewriteRuleTokenStream stream_KW_LEVEL=new RewriteRuleTokenStream(adaptor,"token KW_LEVEL");
		RewriteRuleTokenStream stream_KW_ISOLATION=new RewriteRuleTokenStream(adaptor,"token KW_ISOLATION");
		RewriteRuleSubtreeStream stream_levelOfIsolation=new RewriteRuleSubtreeStream(adaptor,"rule levelOfIsolation");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2896:3: ( KW_ISOLATION KW_LEVEL levelOfIsolation -> ^( TOK_ISOLATION_LEVEL levelOfIsolation ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2897:3: KW_ISOLATION KW_LEVEL levelOfIsolation
			{
			KW_ISOLATION1094=(Token)match(input,KW_ISOLATION,FOLLOW_KW_ISOLATION_in_isolationLevel19288); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ISOLATION.add(KW_ISOLATION1094);

			KW_LEVEL1095=(Token)match(input,KW_LEVEL,FOLLOW_KW_LEVEL_in_isolationLevel19290); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_LEVEL.add(KW_LEVEL1095);

			pushFollow(FOLLOW_levelOfIsolation_in_isolationLevel19292);
			levelOfIsolation1096=levelOfIsolation();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_levelOfIsolation.add(levelOfIsolation1096.getTree());
			// AST REWRITE
			// elements: levelOfIsolation
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2897:42: -> ^( TOK_ISOLATION_LEVEL levelOfIsolation )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2897:45: ^( TOK_ISOLATION_LEVEL levelOfIsolation )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ISOLATION_LEVEL, "TOK_ISOLATION_LEVEL"), root_1);
				adaptor.addChild(root_1, stream_levelOfIsolation.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// generated cleanup hook: runs unconditionally, nothing to release here
		}
		return retval;
	}
	// $ANTLR end "isolationLevel"


	public static class levelOfIsolation_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "levelOfIsolation"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2901:1: levelOfIsolation : KW_SNAPSHOT -> TOK_ISOLATION_SNAPSHOT ;
	public final HiveParser.levelOfIsolation_return levelOfIsolation() throws RecognitionException {
		HiveParser.levelOfIsolation_return retval = new HiveParser.levelOfIsolation_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SNAPSHOT1097=null;

		ASTNode KW_SNAPSHOT1097_tree=null;
		RewriteRuleTokenStream stream_KW_SNAPSHOT=new RewriteRuleTokenStream(adaptor,"token KW_SNAPSHOT");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2902:3: ( KW_SNAPSHOT -> TOK_ISOLATION_SNAPSHOT )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2903:3: KW_SNAPSHOT
			{
			KW_SNAPSHOT1097=(Token)match(input,KW_SNAPSHOT,FOLLOW_KW_SNAPSHOT_in_levelOfIsolation19317); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SNAPSHOT.add(KW_SNAPSHOT1097);

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2903:15: -> TOK_ISOLATION_SNAPSHOT
			{
				adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_ISOLATION_SNAPSHOT, "TOK_ISOLATION_SNAPSHOT"));
			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// generated cleanup hook: runs unconditionally, nothing to release here
		}
		return retval;
	}
	// $ANTLR end "levelOfIsolation"


	public static class commitStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "commitStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2906:1: commitStatement : KW_COMMIT ( KW_WORK )? -> TOK_COMMIT ;
	public final HiveParser.commitStatement_return commitStatement() throws RecognitionException {
		HiveParser.commitStatement_return retval = new HiveParser.commitStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_COMMIT1098=null;
		Token KW_WORK1099=null;

		ASTNode KW_COMMIT1098_tree=null;
		ASTNode KW_WORK1099_tree=null;
		RewriteRuleTokenStream stream_KW_WORK=new RewriteRuleTokenStream(adaptor,"token KW_WORK");
		RewriteRuleTokenStream stream_KW_COMMIT=new RewriteRuleTokenStream(adaptor,"token KW_COMMIT");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2907:3: ( KW_COMMIT ( KW_WORK )? -> TOK_COMMIT )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2908:3: KW_COMMIT ( KW_WORK )?
			{
			KW_COMMIT1098=(Token)match(input,KW_COMMIT,FOLLOW_KW_COMMIT_in_commitStatement19336); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_COMMIT.add(KW_COMMIT1098);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2908:13: ( KW_WORK )?
			int alt359=2;
			int LA359_0 = input.LA(1);
			if ( (LA359_0==KW_WORK) ) {
				alt359=1;
			}
			switch (alt359) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2908:15: KW_WORK
					{
					KW_WORK1099=(Token)match(input,KW_WORK,FOLLOW_KW_WORK_in_commitStatement19340); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_WORK.add(KW_WORK1099);

					}
					break;

			}

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2908:26: -> TOK_COMMIT
			{
				adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_COMMIT, "TOK_COMMIT"));
			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// generated cleanup hook: runs unconditionally, nothing to release here
		}
		return retval;
	}
	// $ANTLR end "commitStatement"


	public static class rollbackStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "rollbackStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2911:1: rollbackStatement : KW_ROLLBACK ( KW_WORK )? -> TOK_ROLLBACK ;
	public final HiveParser.rollbackStatement_return rollbackStatement() throws RecognitionException {
		HiveParser.rollbackStatement_return retval = new HiveParser.rollbackStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ROLLBACK1100=null;
		Token KW_WORK1101=null;

		ASTNode KW_ROLLBACK1100_tree=null;
		ASTNode KW_WORK1101_tree=null;
		RewriteRuleTokenStream stream_KW_ROLLBACK=new RewriteRuleTokenStream(adaptor,"token KW_ROLLBACK");
		RewriteRuleTokenStream stream_KW_WORK=new RewriteRuleTokenStream(adaptor,"token KW_WORK");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2912:3: ( KW_ROLLBACK ( KW_WORK )? -> TOK_ROLLBACK )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2913:3: KW_ROLLBACK ( KW_WORK )?
			{
			KW_ROLLBACK1100=(Token)match(input,KW_ROLLBACK,FOLLOW_KW_ROLLBACK_in_rollbackStatement19362); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ROLLBACK.add(KW_ROLLBACK1100);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2913:15: ( KW_WORK )?
			int alt360=2;
			int LA360_0 = input.LA(1);
			if ( (LA360_0==KW_WORK) ) {
				alt360=1;
			}
			switch (alt360) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2913:17: KW_WORK
					{
					KW_WORK1101=(Token)match(input,KW_WORK,FOLLOW_KW_WORK_in_rollbackStatement19366); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_WORK.add(KW_WORK1101);

					}
					break;

			}

			// AST REWRITE
			// elements: 
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2913:28: -> TOK_ROLLBACK
			{
				adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_ROLLBACK, "TOK_ROLLBACK"));
			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// generated cleanup hook: runs unconditionally, nothing to release here
		}
		return retval;
	}
	// $ANTLR end "rollbackStatement"


	public static class setAutoCommitStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "setAutoCommitStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2915:1: setAutoCommitStatement : KW_SET KW_AUTOCOMMIT booleanValueTok -> ^( TOK_SET_AUTOCOMMIT booleanValueTok ) ;
	public final HiveParser.setAutoCommitStatement_return setAutoCommitStatement() throws RecognitionException {
		HiveParser.setAutoCommitStatement_return retval = new HiveParser.setAutoCommitStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_SET1102=null;
		Token KW_AUTOCOMMIT1103=null;
		ParserRuleReturnScope booleanValueTok1104 =null;

		ASTNode KW_SET1102_tree=null;
		ASTNode KW_AUTOCOMMIT1103_tree=null;
		RewriteRuleTokenStream stream_KW_AUTOCOMMIT=new RewriteRuleTokenStream(adaptor,"token KW_AUTOCOMMIT");
		RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
		RewriteRuleSubtreeStream stream_booleanValueTok=new RewriteRuleSubtreeStream(adaptor,"rule booleanValueTok");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2916:3: ( KW_SET KW_AUTOCOMMIT booleanValueTok -> ^( TOK_SET_AUTOCOMMIT booleanValueTok ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2917:3: KW_SET KW_AUTOCOMMIT booleanValueTok
			{
			KW_SET1102=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_setAutoCommitStatement19387); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_SET.add(KW_SET1102);

			KW_AUTOCOMMIT1103=(Token)match(input,KW_AUTOCOMMIT,FOLLOW_KW_AUTOCOMMIT_in_setAutoCommitStatement19389); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_AUTOCOMMIT.add(KW_AUTOCOMMIT1103);

			pushFollow(FOLLOW_booleanValueTok_in_setAutoCommitStatement19391);
			booleanValueTok1104=booleanValueTok();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_booleanValueTok.add(booleanValueTok1104.getTree());
			// AST REWRITE
			// elements: booleanValueTok
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2917:40: -> ^( TOK_SET_AUTOCOMMIT booleanValueTok )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2917:43: ^( TOK_SET_AUTOCOMMIT booleanValueTok )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_SET_AUTOCOMMIT, "TOK_SET_AUTOCOMMIT"), root_1);
				adaptor.addChild(root_1, stream_booleanValueTok.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// generated cleanup hook: runs unconditionally, nothing to release here
		}
		return retval;
	}
	// $ANTLR end "setAutoCommitStatement"


	public static class abortTransactionStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "abortTransactionStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2923:1: abortTransactionStatement : KW_ABORT KW_TRANSACTIONS ( Number )+ -> ^( TOK_ABORT_TRANSACTIONS ( Number )+ ) ;
	public final HiveParser.abortTransactionStatement_return abortTransactionStatement() throws RecognitionException {
		HiveParser.abortTransactionStatement_return retval = new HiveParser.abortTransactionStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_ABORT1105=null;
		Token KW_TRANSACTIONS1106=null;
		Token Number1107=null;

		ASTNode KW_ABORT1105_tree=null;
		ASTNode KW_TRANSACTIONS1106_tree=null;
		ASTNode Number1107_tree=null;
		RewriteRuleTokenStream stream_Number=new RewriteRuleTokenStream(adaptor,"token Number");
		RewriteRuleTokenStream stream_KW_TRANSACTIONS=new RewriteRuleTokenStream(adaptor,"token KW_TRANSACTIONS");
		RewriteRuleTokenStream stream_KW_ABORT=new RewriteRuleTokenStream(adaptor,"token KW_ABORT");

		 pushMsg("abort transactions statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2926:3: ( KW_ABORT KW_TRANSACTIONS ( Number )+ -> ^( TOK_ABORT_TRANSACTIONS ( Number )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2927:3: KW_ABORT KW_TRANSACTIONS ( Number )+
			{
			KW_ABORT1105=(Token)match(input,KW_ABORT,FOLLOW_KW_ABORT_in_abortTransactionStatement19426); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ABORT.add(KW_ABORT1105);

			KW_TRANSACTIONS1106=(Token)match(input,KW_TRANSACTIONS,FOLLOW_KW_TRANSACTIONS_in_abortTransactionStatement19428); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_TRANSACTIONS.add(KW_TRANSACTIONS1106);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2927:28: ( Number )+
			int cnt361=0;
			loop361:
			while (true) {
				int alt361=2;
				int LA361_0 = input.LA(1);
				if ( (LA361_0==Number) ) {
					alt361=1;
				}

				switch (alt361) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2927:30: Number
					{
					Number1107=(Token)match(input,Number,FOLLOW_Number_in_abortTransactionStatement19432); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_Number.add(Number1107);

					}
					break;

				default :
					if ( cnt361 >= 1 ) break loop361;
					if (state.backtracking>0) {state.failed=true; return retval;}
					EarlyExitException eee = new EarlyExitException(361, input);
					throw eee;
				}
				cnt361++;
			}

			// AST REWRITE
			// elements: Number
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2927:40: -> ^( TOK_ABORT_TRANSACTIONS ( Number )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2927:43: ^( TOK_ABORT_TRANSACTIONS ( Number )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_ABORT_TRANSACTIONS, "TOK_ABORT_TRANSACTIONS"), root_1);
				if ( !(stream_Number.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_Number.hasNext() ) {
					adaptor.addChild(root_1, stream_Number.nextNode());
				}
				stream_Number.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// generated cleanup hook: runs unconditionally, nothing to release here
		}
		return retval;
	}
	// $ANTLR end "abortTransactionStatement"


	public static class mergeStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "mergeStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2934:1: mergeStatement : KW_MERGE KW_INTO tableName ( ( KW_AS )? identifier )? KW_USING joinSourcePart KW_ON expression whenClauses -> ^( TOK_MERGE ^( TOK_TABREF tableName ( identifier )? ) joinSourcePart expression whenClauses ) ;
	public final HiveParser.mergeStatement_return mergeStatement() throws RecognitionException {
		HiveParser.mergeStatement_return retval = new HiveParser.mergeStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_MERGE1108=null;
		Token KW_INTO1109=null;
		Token KW_AS1111=null;
		Token KW_USING1113=null;
		Token KW_ON1115=null;
		ParserRuleReturnScope tableName1110 =null;
		ParserRuleReturnScope identifier1112 =null;
		ParserRuleReturnScope joinSourcePart1114 =null;
		ParserRuleReturnScope expression1116 =null;
		ParserRuleReturnScope whenClauses1117 =null;

		ASTNode KW_MERGE1108_tree=null;
		ASTNode KW_INTO1109_tree=null;
		ASTNode KW_AS1111_tree=null;
		ASTNode KW_USING1113_tree=null;
		ASTNode KW_ON1115_tree=null;
		RewriteRuleTokenStream stream_KW_MERGE=new RewriteRuleTokenStream(adaptor,"token KW_MERGE");
		RewriteRuleTokenStream stream_KW_INTO=new RewriteRuleTokenStream(adaptor,"token KW_INTO");
		RewriteRuleTokenStream stream_KW_USING=new RewriteRuleTokenStream(adaptor,"token KW_USING");
		RewriteRuleTokenStream stream_KW_ON=new RewriteRuleTokenStream(adaptor,"token KW_ON");
		RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
		RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
		RewriteRuleSubtreeStream stream_expression=new RewriteRuleSubtreeStream(adaptor,"rule expression");
		RewriteRuleSubtreeStream stream_whenClauses=new RewriteRuleSubtreeStream(adaptor,"rule whenClauses");
		RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
		RewriteRuleSubtreeStream stream_joinSourcePart=new RewriteRuleSubtreeStream(adaptor,"rule joinSourcePart");

		 pushMsg("MERGE statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2937:4: ( KW_MERGE KW_INTO tableName ( ( KW_AS )? identifier )? KW_USING joinSourcePart KW_ON expression whenClauses -> ^( TOK_MERGE ^( TOK_TABREF tableName ( identifier )? ) joinSourcePart expression whenClauses ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2938:4: KW_MERGE KW_INTO tableName ( ( KW_AS )? identifier )? KW_USING joinSourcePart KW_ON expression whenClauses
			{
			KW_MERGE1108=(Token)match(input,KW_MERGE,FOLLOW_KW_MERGE_in_mergeStatement19478); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_MERGE.add(KW_MERGE1108);

			KW_INTO1109=(Token)match(input,KW_INTO,FOLLOW_KW_INTO_in_mergeStatement19480); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_INTO.add(KW_INTO1109);

			pushFollow(FOLLOW_tableName_in_mergeStatement19482);
			tableName1110=tableName();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_tableName.add(tableName1110.getTree());
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2938:31: ( ( KW_AS )? identifier )?
			int alt363=2;
			int LA363_0 = input.LA(1);
			if ( (LA363_0==Identifier||(LA363_0 >= KW_ABORT && LA363_0 <= KW_AFTER)||LA363_0==KW_ALLOC_FRACTION||LA363_0==KW_ANALYZE||LA363_0==KW_ARCHIVE||(LA363_0 >= KW_AS && LA363_0 <= KW_ASC)||(LA363_0 >= KW_AUTOCOMMIT && LA363_0 <= KW_BEFORE)||(LA363_0 >= KW_BUCKET && LA363_0 <= KW_BUCKETS)||(LA363_0 >= KW_CACHE && LA363_0 <= KW_CASCADE)||LA363_0==KW_CHANGE||(LA363_0 >= KW_CHECK && LA363_0 <= KW_COLLECTION)||(LA363_0 >= KW_COLUMNS && LA363_0 <= KW_COMMENT)||(LA363_0 >= KW_COMPACT && LA363_0 <= KW_CONCATENATE)||LA363_0==KW_CONTINUE||LA363_0==KW_DATA||LA363_0==KW_DATABASES||(LA363_0 >= KW_DATETIME && LA363_0 <= KW_DBPROPERTIES)||(LA363_0 >= KW_DEFAULT && LA363_0 <= KW_DEFINED)||(LA363_0 >= KW_DELIMITED && LA363_0 <= KW_DESC)||(LA363_0 >= KW_DETAIL && LA363_0 <= KW_DISABLE)||(LA363_0 >= KW_DISTRIBUTE && LA363_0 <= KW_DO)||LA363_0==KW_DOW||(LA363_0 >= KW_DUMP && LA363_0 <= KW_ELEM_TYPE)||LA363_0==KW_ENABLE||(LA363_0 >= KW_ENFORCED && LA363_0 <= KW_ESCAPED)||LA363_0==KW_EXCLUSIVE||(LA363_0 >= KW_EXPLAIN && LA363_0 <= KW_EXPRESSION)||(LA363_0 >= KW_FIELDS && LA363_0 <= KW_FIRST)||(LA363_0 >= KW_FORMAT && LA363_0 <= KW_FORMATTED)||LA363_0==KW_FUNCTIONS||(LA363_0 >= KW_HOUR && LA363_0 <= KW_IDXPROPERTIES)||(LA363_0 >= KW_INDEX && LA363_0 <= KW_INDEXES)||(LA363_0 >= KW_INPATH && LA363_0 <= KW_INPUTFORMAT)||(LA363_0 >= KW_ISOLATION && LA363_0 <= KW_JAR)||(LA363_0 >= KW_KEY && LA363_0 <= KW_LAST)||LA363_0==KW_LEVEL||(LA363_0 >= KW_LIMIT && LA363_0 <= KW_LOAD)||(LA363_0 >= KW_LOCATION && LA363_0 <= KW_LONG)||LA363_0==KW_MANAGEMENT||(LA363_0 >= KW_MAPJOIN && LA363_0 <= KW_MATERIALIZED)||LA363_0==KW_METADATA||(LA363_0 >= KW_MINUTE && LA363_0 <= KW_MONTH)||(LA363_0 >= KW_MOVE && LA363_0 <= KW_MSCK)||(LA363_0 >= KW_NORELY && LA363_0 <= KW_NOSCAN)||LA363_0==KW_NOVALIDATE||LA363_0==KW_NULLS||LA363_0==KW_OFFSET||(LA363_0 >= KW_OPERATOR && LA363_0 <= KW_OPTION)||(LA363_0 >= KW_OUTPUTDRIVER && LA363_0 <= KW_OUTPUTFORMAT)||(LA363_0 >= KW_OVERWRITE && LA363_0 <= KW_OWNER)||(LA363_0 >= KW_PARTITIONED && LA363_0 <= KW_PATH)||(LA363_0 >= KW_PLAN && LA363_0 <= KW_POOL)||LA363_0==KW_PRINCIPALS||(LA363_0 >= KW_PURGE && LA363_0 <= KW_QUERY_PARALLELISM)||LA363_0==KW_READ||(LA363_0 >= KW_REBUILD && LA363_0 <= KW_RECORDWRITER)||(LA363_0 >= KW_RELOAD && LA363_0 <= KW_RESTRICT)||LA363_0==KW_REWRITE||(LA363_0 >= KW_ROLE && LA363_0 <= KW_ROLES)||(LA363_0 >= KW_SCHEDULING_POLICY && LA363_0 <= KW_SECOND)||(LA363_0 >= KW_SEMI && LA363_0 <= KW_SERVER)||(LA363_0 >= KW_SETS && LA363_0 <= KW_SKEWED)||(LA363_0 >= KW_SNAPSHOT && LA363_0 <= KW_SSL)||(LA363_0 >= KW_STATISTICS && LA363_0 <= KW_SUMMARY)||LA363_0==KW_TABLES||(LA363_0 >= KW_TBLPROPERTIES && LA363_0 <= KW_TERMINATED)||LA363_0==KW_TINYINT||(LA363_0 >= KW_TOUCH && LA363_0 <= KW_TRANSACTIONS)||LA363_0==KW_UNARCHIVE||LA363_0==KW_UNDO||LA363_0==KW_UNIONTYPE||(LA363_0 >= KW_UNLOCK && LA363_0 <= KW_UNSIGNED)||(LA363_0 >= KW_URI && LA363_0 <= KW_USE)||(LA363_0 >= KW_UTC && LA363_0 <= KW_VALIDATE)||LA363_0==KW_VALUE_TYPE||(LA363_0 >= KW_VECTORIZATION && LA363_0 <= KW_WEEK)||LA363_0==KW_WHILE||(LA363_0 >= KW_WORK && LA363_0 <= KW_ZONE)||LA363_0==KW_BATCH||LA363_0==KW_DAYOFWEEK||LA363_0==KW_HOLD_DDLTIME||LA363_0==KW_IGNORE||LA363_0==KW_NO_DROP||LA363_0==KW_OFFLINE||LA363_0==KW_PROTECTION||LA363_0==KW_READONLY||LA363_0==KW_TIMESTAMPTZ) ) {
				alt363=1;
			}
			switch (alt363) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2938:32: ( KW_AS )? identifier
					{
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2938:32: ( KW_AS )?
					int alt362=2;
					int LA362_0 = input.LA(1);
					if ( (LA362_0==KW_AS) ) {
						alt362=1;
					}
					switch (alt362) {
						case 1 :
							// org/apache/hadoop/hive/ql/parse/HiveParser.g:2938:32: KW_AS
							{
							KW_AS1111=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_mergeStatement19485); if (state.failed) return retval; 
							if ( state.backtracking==0 ) stream_KW_AS.add(KW_AS1111);

							}
							break;

					}

					pushFollow(FOLLOW_identifier_in_mergeStatement19488);
					identifier1112=identifier();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_identifier.add(identifier1112.getTree());
					}
					break;

			}

			KW_USING1113=(Token)match(input,KW_USING,FOLLOW_KW_USING_in_mergeStatement19492); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_USING.add(KW_USING1113);

			pushFollow(FOLLOW_joinSourcePart_in_mergeStatement19494);
			joinSourcePart1114=joinSourcePart();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_joinSourcePart.add(joinSourcePart1114.getTree());
			KW_ON1115=(Token)match(input,KW_ON,FOLLOW_KW_ON_in_mergeStatement19496); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_ON.add(KW_ON1115);

			pushFollow(FOLLOW_expression_in_mergeStatement19498);
			expression1116=expression();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_expression.add(expression1116.getTree());
			pushFollow(FOLLOW_whenClauses_in_mergeStatement19500);
			whenClauses1117=whenClauses();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_whenClauses.add(whenClauses1117.getTree());
			// AST REWRITE
			// elements: tableName, identifier, whenClauses, expression, joinSourcePart
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2938:105: -> ^( TOK_MERGE ^( TOK_TABREF tableName ( identifier )? ) joinSourcePart expression whenClauses )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2939:5: ^( TOK_MERGE ^( TOK_TABREF tableName ( identifier )? ) joinSourcePart expression whenClauses )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_MERGE, "TOK_MERGE"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2939:17: ^( TOK_TABREF tableName ( identifier )? )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_TABREF, "TOK_TABREF"), root_2);
				adaptor.addChild(root_2, stream_tableName.nextTree());
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2939:40: ( identifier )?
				if ( stream_identifier.hasNext() ) {
					adaptor.addChild(root_2, stream_identifier.nextTree());
				}
				stream_identifier.reset();

				adaptor.addChild(root_1, root_2);
				}

				adaptor.addChild(root_1, stream_joinSourcePart.nextTree());
				adaptor.addChild(root_1, stream_expression.nextTree());
				adaptor.addChild(root_1, stream_whenClauses.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// generated cleanup hook: runs unconditionally, nothing to release here
		}
		return retval;
	}
	// $ANTLR end "mergeStatement"


	public static class whenClauses_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "whenClauses"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2947:1: whenClauses : ( whenMatchedAndClause | whenMatchedThenClause )* ( whenNotMatchedClause )? ;
	public final HiveParser.whenClauses_return whenClauses() throws RecognitionException {
		HiveParser.whenClauses_return retval = new HiveParser.whenClauses_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		ParserRuleReturnScope whenMatchedAndClause1118 =null;
		ParserRuleReturnScope whenMatchedThenClause1119 =null;
		ParserRuleReturnScope whenNotMatchedClause1120 =null;


		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2948:4: ( ( whenMatchedAndClause | whenMatchedThenClause )* ( whenNotMatchedClause )? )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2949:4: ( whenMatchedAndClause | whenMatchedThenClause )* ( whenNotMatchedClause )?
			{
			root_0 = (ASTNode)adaptor.nil();


			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2949:4: ( whenMatchedAndClause | whenMatchedThenClause )*
			loop364:
			while (true) {
				int alt364=3;
				int LA364_0 = input.LA(1);
				if ( (LA364_0==KW_WHEN) ) {
					int LA364_1 = input.LA(2);
					if ( (LA364_1==KW_MATCHED) ) {
						int LA364_4 = input.LA(3);
						if ( (LA364_4==KW_AND) ) {
							alt364=1;
						}
						else if ( (LA364_4==KW_THEN) ) {
							alt364=2;
						}

					}

				}

				switch (alt364) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2949:5: whenMatchedAndClause
					{
					pushFollow(FOLLOW_whenMatchedAndClause_in_whenClauses19545);
					whenMatchedAndClause1118=whenMatchedAndClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, whenMatchedAndClause1118.getTree());

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2949:26: whenMatchedThenClause
					{
					pushFollow(FOLLOW_whenMatchedThenClause_in_whenClauses19547);
					whenMatchedThenClause1119=whenMatchedThenClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, whenMatchedThenClause1119.getTree());

					}
					break;

				default :
					break loop364;
				}
			}

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2949:50: ( whenNotMatchedClause )?
			int alt365=2;
			int LA365_0 = input.LA(1);
			if ( (LA365_0==KW_WHEN) ) {
				alt365=1;
			}
			switch (alt365) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2949:50: whenNotMatchedClause
					{
					pushFollow(FOLLOW_whenNotMatchedClause_in_whenClauses19551);
					whenNotMatchedClause1120=whenNotMatchedClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) adaptor.addChild(root_0, whenNotMatchedClause1120.getTree());

					}
					break;

			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// generated cleanup hook: runs unconditionally, nothing to release here
		}
		return retval;
	}
	// $ANTLR end "whenClauses"


	public static class whenNotMatchedClause_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "whenNotMatchedClause"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2951:1: whenNotMatchedClause : KW_WHEN KW_NOT KW_MATCHED ( KW_AND expression )? KW_THEN KW_INSERT KW_VALUES valueRowConstructor -> ^( TOK_NOT_MATCHED ^( TOK_INSERT valueRowConstructor ) ( expression )? ) ;
	public final HiveParser.whenNotMatchedClause_return whenNotMatchedClause() throws RecognitionException {
		HiveParser.whenNotMatchedClause_return retval = new HiveParser.whenNotMatchedClause_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_WHEN1121=null;
		Token KW_NOT1122=null;
		Token KW_MATCHED1123=null;
		Token KW_AND1124=null;
		Token KW_THEN1126=null;
		Token KW_INSERT1127=null;
		Token KW_VALUES1128=null;
		ParserRuleReturnScope expression1125 =null;
		ParserRuleReturnScope valueRowConstructor1129 =null;

		ASTNode KW_WHEN1121_tree=null;
		ASTNode KW_NOT1122_tree=null;
		ASTNode KW_MATCHED1123_tree=null;
		ASTNode KW_AND1124_tree=null;
		ASTNode KW_THEN1126_tree=null;
		ASTNode KW_INSERT1127_tree=null;
		ASTNode KW_VALUES1128_tree=null;
		RewriteRuleTokenStream stream_KW_WHEN=new RewriteRuleTokenStream(adaptor,"token KW_WHEN");
		RewriteRuleTokenStream stream_KW_NOT=new RewriteRuleTokenStream(adaptor,"token KW_NOT");
		RewriteRuleTokenStream stream_KW_AND=new RewriteRuleTokenStream(adaptor,"token KW_AND");
		RewriteRuleTokenStream stream_KW_THEN=new RewriteRuleTokenStream(adaptor,"token KW_THEN");
		RewriteRuleTokenStream stream_KW_INSERT=new RewriteRuleTokenStream(adaptor,"token KW_INSERT");
		RewriteRuleTokenStream stream_KW_MATCHED=new RewriteRuleTokenStream(adaptor,"token KW_MATCHED");
		RewriteRuleTokenStream stream_KW_VALUES=new RewriteRuleTokenStream(adaptor,"token KW_VALUES");
		RewriteRuleSubtreeStream stream_expression=new RewriteRuleSubtreeStream(adaptor,"rule expression");
		RewriteRuleSubtreeStream stream_valueRowConstructor=new RewriteRuleSubtreeStream(adaptor,"rule valueRowConstructor");

		 pushMsg("WHEN NOT MATCHED clause", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2954:4: ( KW_WHEN KW_NOT KW_MATCHED ( KW_AND expression )? KW_THEN KW_INSERT KW_VALUES valueRowConstructor -> ^( TOK_NOT_MATCHED ^( TOK_INSERT valueRowConstructor ) ( expression )? ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2955:3: KW_WHEN KW_NOT KW_MATCHED ( KW_AND expression )? KW_THEN KW_INSERT KW_VALUES valueRowConstructor
			{
			KW_WHEN1121=(Token)match(input,KW_WHEN,FOLLOW_KW_WHEN_in_whenNotMatchedClause19578); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_WHEN.add(KW_WHEN1121);

			KW_NOT1122=(Token)match(input,KW_NOT,FOLLOW_KW_NOT_in_whenNotMatchedClause19580); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_NOT.add(KW_NOT1122);

			KW_MATCHED1123=(Token)match(input,KW_MATCHED,FOLLOW_KW_MATCHED_in_whenNotMatchedClause19582); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_MATCHED.add(KW_MATCHED1123);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2955:29: ( KW_AND expression )?
			int alt366=2;
			int LA366_0 = input.LA(1);
			if ( (LA366_0==KW_AND) ) {
				alt366=1;
			}
			switch (alt366) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2955:30: KW_AND expression
					{
					KW_AND1124=(Token)match(input,KW_AND,FOLLOW_KW_AND_in_whenNotMatchedClause19585); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_AND.add(KW_AND1124);

					pushFollow(FOLLOW_expression_in_whenNotMatchedClause19587);
					expression1125=expression();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_expression.add(expression1125.getTree());
					}
					break;

			}

			KW_THEN1126=(Token)match(input,KW_THEN,FOLLOW_KW_THEN_in_whenNotMatchedClause19591); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_THEN.add(KW_THEN1126);

			KW_INSERT1127=(Token)match(input,KW_INSERT,FOLLOW_KW_INSERT_in_whenNotMatchedClause19593); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_INSERT.add(KW_INSERT1127);

			KW_VALUES1128=(Token)match(input,KW_VALUES,FOLLOW_KW_VALUES_in_whenNotMatchedClause19595); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_VALUES.add(KW_VALUES1128);

			pushFollow(FOLLOW_valueRowConstructor_in_whenNotMatchedClause19597);
			valueRowConstructor1129=valueRowConstructor();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_valueRowConstructor.add(valueRowConstructor1129.getTree());
			// AST REWRITE
			// elements: expression, valueRowConstructor
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2955:98: -> ^( TOK_NOT_MATCHED ^( TOK_INSERT valueRowConstructor ) ( expression )? )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2956:5: ^( TOK_NOT_MATCHED ^( TOK_INSERT valueRowConstructor ) ( expression )? )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_NOT_MATCHED, "TOK_NOT_MATCHED"), root_1);
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2956:23: ^( TOK_INSERT valueRowConstructor )
				{
				ASTNode root_2 = (ASTNode)adaptor.nil();
				root_2 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_INSERT, "TOK_INSERT"), root_2);
				adaptor.addChild(root_2, stream_valueRowConstructor.nextTree());
				adaptor.addChild(root_1, root_2);
				}

				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2956:57: ( expression )?
				if ( stream_expression.hasNext() ) {
					adaptor.addChild(root_1, stream_expression.nextTree());
				}
				stream_expression.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// generated cleanup hook: runs unconditionally, nothing to release here
		}
		return retval;
	}
	// $ANTLR end "whenNotMatchedClause"


	public static class whenMatchedAndClause_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "whenMatchedAndClause"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2958:1: whenMatchedAndClause : KW_WHEN KW_MATCHED KW_AND expression KW_THEN updateOrDelete -> ^( TOK_MATCHED updateOrDelete expression ) ;
	public final HiveParser.whenMatchedAndClause_return whenMatchedAndClause() throws RecognitionException {
		HiveParser.whenMatchedAndClause_return retval = new HiveParser.whenMatchedAndClause_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_WHEN1130=null;
		Token KW_MATCHED1131=null;
		Token KW_AND1132=null;
		Token KW_THEN1134=null;
		ParserRuleReturnScope expression1133 =null;
		ParserRuleReturnScope updateOrDelete1135 =null;

		ASTNode KW_WHEN1130_tree=null;
		ASTNode KW_MATCHED1131_tree=null;
		ASTNode KW_AND1132_tree=null;
		ASTNode KW_THEN1134_tree=null;
		RewriteRuleTokenStream stream_KW_WHEN=new RewriteRuleTokenStream(adaptor,"token KW_WHEN");
		RewriteRuleTokenStream stream_KW_AND=new RewriteRuleTokenStream(adaptor,"token KW_AND");
		RewriteRuleTokenStream stream_KW_THEN=new RewriteRuleTokenStream(adaptor,"token KW_THEN");
		RewriteRuleTokenStream stream_KW_MATCHED=new RewriteRuleTokenStream(adaptor,"token KW_MATCHED");
		RewriteRuleSubtreeStream stream_expression=new RewriteRuleSubtreeStream(adaptor,"rule expression");
		RewriteRuleSubtreeStream stream_updateOrDelete=new RewriteRuleSubtreeStream(adaptor,"rule updateOrDelete");

		 pushMsg("WHEN MATCHED AND clause", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2961:3: ( KW_WHEN KW_MATCHED KW_AND expression KW_THEN updateOrDelete -> ^( TOK_MATCHED updateOrDelete expression ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2962:3: KW_WHEN KW_MATCHED KW_AND expression KW_THEN updateOrDelete
			{
			KW_WHEN1130=(Token)match(input,KW_WHEN,FOLLOW_KW_WHEN_in_whenMatchedAndClause19640); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_WHEN.add(KW_WHEN1130);

			KW_MATCHED1131=(Token)match(input,KW_MATCHED,FOLLOW_KW_MATCHED_in_whenMatchedAndClause19642); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_MATCHED.add(KW_MATCHED1131);

			KW_AND1132=(Token)match(input,KW_AND,FOLLOW_KW_AND_in_whenMatchedAndClause19644); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_AND.add(KW_AND1132);

			pushFollow(FOLLOW_expression_in_whenMatchedAndClause19646);
			expression1133=expression();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_expression.add(expression1133.getTree());
			KW_THEN1134=(Token)match(input,KW_THEN,FOLLOW_KW_THEN_in_whenMatchedAndClause19648); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_THEN.add(KW_THEN1134);

			pushFollow(FOLLOW_updateOrDelete_in_whenMatchedAndClause19650);
			updateOrDelete1135=updateOrDelete();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_updateOrDelete.add(updateOrDelete1135.getTree());
			// AST REWRITE
			// elements: expression, updateOrDelete
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2962:63: -> ^( TOK_MATCHED updateOrDelete expression )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2963:5: ^( TOK_MATCHED updateOrDelete expression )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_MATCHED, "TOK_MATCHED"), root_1);
				adaptor.addChild(root_1, stream_updateOrDelete.nextTree());
				adaptor.addChild(root_1, stream_expression.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// generated cleanup hook: runs unconditionally, nothing to release here
		}
		return retval;
	}
	// $ANTLR end "whenMatchedAndClause"


	public static class whenMatchedThenClause_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "whenMatchedThenClause"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2965:1: whenMatchedThenClause : KW_WHEN KW_MATCHED KW_THEN updateOrDelete -> ^( TOK_MATCHED updateOrDelete ) ;
	public final HiveParser.whenMatchedThenClause_return whenMatchedThenClause() throws RecognitionException {
		HiveParser.whenMatchedThenClause_return retval = new HiveParser.whenMatchedThenClause_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_WHEN1136=null;
		Token KW_MATCHED1137=null;
		Token KW_THEN1138=null;
		ParserRuleReturnScope updateOrDelete1139 =null;

		ASTNode KW_WHEN1136_tree=null;
		ASTNode KW_MATCHED1137_tree=null;
		ASTNode KW_THEN1138_tree=null;
		RewriteRuleTokenStream stream_KW_WHEN=new RewriteRuleTokenStream(adaptor,"token KW_WHEN");
		RewriteRuleTokenStream stream_KW_THEN=new RewriteRuleTokenStream(adaptor,"token KW_THEN");
		RewriteRuleTokenStream stream_KW_MATCHED=new RewriteRuleTokenStream(adaptor,"token KW_MATCHED");
		RewriteRuleSubtreeStream stream_updateOrDelete=new RewriteRuleSubtreeStream(adaptor,"rule updateOrDelete");

		 pushMsg("WHEN MATCHED THEN clause", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2968:3: ( KW_WHEN KW_MATCHED KW_THEN updateOrDelete -> ^( TOK_MATCHED updateOrDelete ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2969:3: KW_WHEN KW_MATCHED KW_THEN updateOrDelete
			{
			KW_WHEN1136=(Token)match(input,KW_WHEN,FOLLOW_KW_WHEN_in_whenMatchedThenClause19688); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_WHEN.add(KW_WHEN1136);

			KW_MATCHED1137=(Token)match(input,KW_MATCHED,FOLLOW_KW_MATCHED_in_whenMatchedThenClause19690); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_MATCHED.add(KW_MATCHED1137);

			KW_THEN1138=(Token)match(input,KW_THEN,FOLLOW_KW_THEN_in_whenMatchedThenClause19692); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_THEN.add(KW_THEN1138);

			pushFollow(FOLLOW_updateOrDelete_in_whenMatchedThenClause19694);
			updateOrDelete1139=updateOrDelete();
			state._fsp--;
			if (state.failed) return retval;
			if ( state.backtracking==0 ) stream_updateOrDelete.add(updateOrDelete1139.getTree());
			// AST REWRITE
			// elements: updateOrDelete
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2969:45: -> ^( TOK_MATCHED updateOrDelete )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2970:6: ^( TOK_MATCHED updateOrDelete )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_MATCHED, "TOK_MATCHED"), root_1);
				adaptor.addChild(root_1, stream_updateOrDelete.nextTree());
				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "whenMatchedThenClause"
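
	// The rule above parses the unconditional MERGE branch
	//   WHEN MATCHED THEN <updateOrDelete>
	// e.g. the illustrative fragment "WHEN MATCHED THEN DELETE",
	// and rewrites it to ^( TOK_MATCHED updateOrDelete ).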


	public static class updateOrDelete_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "updateOrDelete"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2972:1: updateOrDelete : ( KW_UPDATE setColumnsClause -> ^( TOK_UPDATE setColumnsClause ) | KW_DELETE -> TOK_DELETE );
	public final HiveParser.updateOrDelete_return updateOrDelete() throws RecognitionException {
		HiveParser.updateOrDelete_return retval = new HiveParser.updateOrDelete_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_UPDATE1140=null;
		Token KW_DELETE1142=null;
		ParserRuleReturnScope setColumnsClause1141 =null;

		ASTNode KW_UPDATE1140_tree=null;
		ASTNode KW_DELETE1142_tree=null;
		RewriteRuleTokenStream stream_KW_DELETE=new RewriteRuleTokenStream(adaptor,"token KW_DELETE");
		RewriteRuleTokenStream stream_KW_UPDATE=new RewriteRuleTokenStream(adaptor,"token KW_UPDATE");
		RewriteRuleSubtreeStream stream_setColumnsClause=new RewriteRuleSubtreeStream(adaptor,"rule setColumnsClause");

		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2973:4: ( KW_UPDATE setColumnsClause -> ^( TOK_UPDATE setColumnsClause ) | KW_DELETE -> TOK_DELETE )
			int alt367=2;
			int LA367_0 = input.LA(1);
			if ( (LA367_0==KW_UPDATE) ) {
				alt367=1;
			}
			else if ( (LA367_0==KW_DELETE) ) {
				alt367=2;
			}

			else {
				if (state.backtracking>0) {state.failed=true; return retval;}
				NoViableAltException nvae =
					new NoViableAltException("", 367, 0, input);
				throw nvae;
			}

			switch (alt367) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2974:4: KW_UPDATE setColumnsClause
					{
					KW_UPDATE1140=(Token)match(input,KW_UPDATE,FOLLOW_KW_UPDATE_in_updateOrDelete19723); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_UPDATE.add(KW_UPDATE1140);

					pushFollow(FOLLOW_setColumnsClause_in_updateOrDelete19725);
					setColumnsClause1141=setColumnsClause();
					state._fsp--;
					if (state.failed) return retval;
					if ( state.backtracking==0 ) stream_setColumnsClause.add(setColumnsClause1141.getTree());
					// AST REWRITE
					// elements: setColumnsClause
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2974:31: -> ^( TOK_UPDATE setColumnsClause )
					{
						// org/apache/hadoop/hive/ql/parse/HiveParser.g:2974:34: ^( TOK_UPDATE setColumnsClause )
						{
						ASTNode root_1 = (ASTNode)adaptor.nil();
						root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_UPDATE, "TOK_UPDATE"), root_1);
						adaptor.addChild(root_1, stream_setColumnsClause.nextTree());
						adaptor.addChild(root_0, root_1);
						}

					}


					retval.tree = root_0;
					}

					}
					break;
				case 2 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2976:4: KW_DELETE
					{
					KW_DELETE1142=(Token)match(input,KW_DELETE,FOLLOW_KW_DELETE_in_updateOrDelete19743); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_KW_DELETE.add(KW_DELETE1142);

					// AST REWRITE
					// elements: 
					// token labels: 
					// rule labels: retval
					// token list labels: 
					// rule list labels: 
					// wildcard labels: 
					if ( state.backtracking==0 ) {
					retval.tree = root_0;
					RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

					root_0 = (ASTNode)adaptor.nil();
					// 2976:14: -> TOK_DELETE
					{
						adaptor.addChild(root_0, (ASTNode)adaptor.create(TOK_DELETE, "TOK_DELETE"));
					}


					retval.tree = root_0;
					}

					}
					break;

			}
			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "updateOrDelete"
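
	// updateOrDelete above accepts either of the two MERGE actions:
	//   UPDATE <setColumnsClause>  -> ^( TOK_UPDATE setColumnsClause )
	//   DELETE                     -> TOK_DELETE
	// Illustrative fragments: "UPDATE SET col1 = expr1, col2 = expr2" or "DELETE".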


	public static class killQueryStatement_return extends ParserRuleReturnScope {
		ASTNode tree;
		@Override
		public ASTNode getTree() { return tree; }
	};


	// $ANTLR start "killQueryStatement"
	// org/apache/hadoop/hive/ql/parse/HiveParser.g:2982:1: killQueryStatement : KW_KILL KW_QUERY ( StringLiteral )+ -> ^( TOK_KILL_QUERY ( StringLiteral )+ ) ;
	public final HiveParser.killQueryStatement_return killQueryStatement() throws RecognitionException {
		HiveParser.killQueryStatement_return retval = new HiveParser.killQueryStatement_return();
		retval.start = input.LT(1);

		ASTNode root_0 = null;

		Token KW_KILL1143=null;
		Token KW_QUERY1144=null;
		Token StringLiteral1145=null;

		ASTNode KW_KILL1143_tree=null;
		ASTNode KW_QUERY1144_tree=null;
		ASTNode StringLiteral1145_tree=null;
		RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
		RewriteRuleTokenStream stream_KW_KILL=new RewriteRuleTokenStream(adaptor,"token KW_KILL");
		RewriteRuleTokenStream stream_KW_QUERY=new RewriteRuleTokenStream(adaptor,"token KW_QUERY");

		 pushMsg("kill query statement", state); 
		try {
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2985:3: ( KW_KILL KW_QUERY ( StringLiteral )+ -> ^( TOK_KILL_QUERY ( StringLiteral )+ ) )
			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2986:3: KW_KILL KW_QUERY ( StringLiteral )+
			{
			KW_KILL1143=(Token)match(input,KW_KILL,FOLLOW_KW_KILL_in_killQueryStatement19775); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_KILL.add(KW_KILL1143);

			KW_QUERY1144=(Token)match(input,KW_QUERY,FOLLOW_KW_QUERY_in_killQueryStatement19777); if (state.failed) return retval; 
			if ( state.backtracking==0 ) stream_KW_QUERY.add(KW_QUERY1144);

			// org/apache/hadoop/hive/ql/parse/HiveParser.g:2986:20: ( StringLiteral )+
			int cnt368=0;
			loop368:
			while (true) {
				int alt368=2;
				int LA368_0 = input.LA(1);
				if ( (LA368_0==StringLiteral) ) {
					alt368=1;
				}

				switch (alt368) {
				case 1 :
					// org/apache/hadoop/hive/ql/parse/HiveParser.g:2986:22: StringLiteral
					{
					StringLiteral1145=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_killQueryStatement19781); if (state.failed) return retval; 
					if ( state.backtracking==0 ) stream_StringLiteral.add(StringLiteral1145);

					}
					break;

				default :
					if ( cnt368 >= 1 ) break loop368;
					if (state.backtracking>0) {state.failed=true; return retval;}
					EarlyExitException eee = new EarlyExitException(368, input);
					throw eee;
				}
				cnt368++;
			}

			// AST REWRITE
			// elements: StringLiteral
			// token labels: 
			// rule labels: retval
			// token list labels: 
			// rule list labels: 
			// wildcard labels: 
			if ( state.backtracking==0 ) {
			retval.tree = root_0;
			RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

			root_0 = (ASTNode)adaptor.nil();
			// 2986:39: -> ^( TOK_KILL_QUERY ( StringLiteral )+ )
			{
				// org/apache/hadoop/hive/ql/parse/HiveParser.g:2986:42: ^( TOK_KILL_QUERY ( StringLiteral )+ )
				{
				ASTNode root_1 = (ASTNode)adaptor.nil();
				root_1 = (ASTNode)adaptor.becomeRoot((ASTNode)adaptor.create(TOK_KILL_QUERY, "TOK_KILL_QUERY"), root_1);
				if ( !(stream_StringLiteral.hasNext()) ) {
					throw new RewriteEarlyExitException();
				}
				while ( stream_StringLiteral.hasNext() ) {
					adaptor.addChild(root_1, stream_StringLiteral.nextNode());
				}
				stream_StringLiteral.reset();

				adaptor.addChild(root_0, root_1);
				}

			}


			retval.tree = root_0;
			}

			}

			retval.stop = input.LT(-1);

			if ( state.backtracking==0 ) {
			retval.tree = (ASTNode)adaptor.rulePostProcessing(root_0);
			adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
			}
			if ( state.backtracking==0 ) { popMsg(state); }
		}

		catch (RecognitionException e) {
			reportError(e);
			throw e;
		}

		finally {
			// do for sure before leaving
		}
		return retval;
	}
	// $ANTLR end "killQueryStatement"
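
	// killQueryStatement above accepts KILL QUERY followed by one or more
	// string literals naming query ids, e.g. the illustrative statement
	//   KILL QUERY 'query_id_1' 'query_id_2'
	// which rewrites to ^( TOK_KILL_QUERY StringLiteral+ ).
	//
	// A minimal driver sketch, assuming the usual ParseDriver wrapper from
	// this package is on the classpath (method shapes can vary across Hive
	// versions, and the query id below is made up):
	//
	//   ParseDriver pd = new ParseDriver();
	//   ASTNode ast = pd.parse("KILL QUERY 'hive_query_id'");
	//   System.out.println(ast.dump());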

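	// The synpredN_HiveParser_fragment methods below are ANTLR 3 syntactic
	// predicates: each speculatively matches a grammar fragment while
	// backtracking is enabled, setting state.failed instead of reporting an
	// error, so the parser can decide between otherwise ambiguous
	// alternatives before committing to one.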
	// $ANTLR start synpred1_HiveParser
	public final void synpred1_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:946:7: ( grantPrivileges )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:946:8: grantPrivileges
		{
		pushFollow(FOLLOW_grantPrivileges_in_synpred1_HiveParser2793);
		grantPrivileges();
		state._fsp--;
		if (state.failed) return;

		}

	}
	// $ANTLR end synpred1_HiveParser

	// $ANTLR start synpred2_HiveParser
	public final void synpred2_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:947:7: ( revokePrivileges )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:947:8: revokePrivileges
		{
		pushFollow(FOLLOW_revokePrivileges_in_synpred2_HiveParser2807);
		revokePrivileges();
		state._fsp--;
		if (state.failed) return;

		}

	}
	// $ANTLR end synpred2_HiveParser

	// $ANTLR start synpred3_HiveParser
	public final void synpred3_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1127:7: ( alterStatementSuffixRename[true] )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1127:8: alterStatementSuffixRename[true]
		{
		pushFollow(FOLLOW_alterStatementSuffixRename_in_synpred3_HiveParser4281);
		alterStatementSuffixRename(true);
		state._fsp--;
		if (state.failed) return;

		}

	}
	// $ANTLR end synpred3_HiveParser

	// $ANTLR start synpred4_HiveParser
	public final void synpred4_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1514:4: ( KW_ELEM_TYPE )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1514:5: KW_ELEM_TYPE
		{
		match(input,KW_ELEM_TYPE,FOLLOW_KW_ELEM_TYPE_in_synpred4_HiveParser7125); if (state.failed) return;

		}

	}
	// $ANTLR end synpred4_HiveParser

	// $ANTLR start synpred5_HiveParser
	public final void synpred5_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1516:4: ( KW_KEY_TYPE )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1516:5: KW_KEY_TYPE
		{
		match(input,KW_KEY_TYPE,FOLLOW_KW_KEY_TYPE_in_synpred5_HiveParser7142); if (state.failed) return;

		}

	}
	// $ANTLR end synpred5_HiveParser

	// $ANTLR start synpred6_HiveParser
	public final void synpred6_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1518:4: ( KW_VALUE_TYPE )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1518:5: KW_VALUE_TYPE
		{
		match(input,KW_VALUE_TYPE,FOLLOW_KW_VALUE_TYPE_in_synpred6_HiveParser7159); if (state.failed) return;

		}

	}
	// $ANTLR end synpred6_HiveParser

	// $ANTLR start synpred7_HiveParser
	public final void synpred7_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1542:5: ( KW_DATABASE | KW_SCHEMA )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:
		{
		if ( input.LA(1)==KW_DATABASE||input.LA(1)==KW_SCHEMA ) {
			input.consume();
			state.errorRecovery=false;
			state.failed=false;
		}
		else {
			if (state.backtracking>0) {state.failed=true; return;}
			MismatchedSetException mse = new MismatchedSetException(null,input);
			throw mse;
		}
		}

	}
	// $ANTLR end synpred7_HiveParser

	// $ANTLR start synpred8_HiveParser
	public final void synpred8_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1544:5: ( KW_FUNCTION )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1544:6: KW_FUNCTION
		{
		match(input,KW_FUNCTION,FOLLOW_KW_FUNCTION_in_synpred8_HiveParser7367); if (state.failed) return;

		}

	}
	// $ANTLR end synpred8_HiveParser

	// $ANTLR start synpred9_HiveParser
	public final void synpred9_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1546:5: ( KW_FORMATTED | KW_EXTENDED )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:
		{
		if ( input.LA(1)==KW_EXTENDED||input.LA(1)==KW_FORMATTED ) {
			input.consume();
			state.errorRecovery=false;
			state.failed=false;
		}
		else {
			if (state.backtracking>0) {state.failed=true; return;}
			MismatchedSetException mse = new MismatchedSetException(null,input);
			throw mse;
		}
		}

	}
	// $ANTLR end synpred9_HiveParser

	// $ANTLR start synpred10_HiveParser
	public final void synpred10_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1557:7: ( KW_COMPUTE )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1557:8: KW_COMPUTE
		{
		match(input,KW_COMPUTE,FOLLOW_KW_COMPUTE_in_synpred10_HiveParser7520); if (state.failed) return;

		}

	}
	// $ANTLR end synpred10_HiveParser

	// $ANTLR start synpred11_HiveParser
	public final void synpred11_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1561:7: ( KW_CACHE )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1561:8: KW_CACHE
		{
		match(input,KW_CACHE,FOLLOW_KW_CACHE_in_synpred11_HiveParser7648); if (state.failed) return;

		}

	}
	// $ANTLR end synpred11_HiveParser

	// $ANTLR start synpred12_HiveParser
	public final void synpred12_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1577:9: ( KW_DATABASE | KW_SCHEMA )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:
		{
		if ( input.LA(1)==KW_DATABASE||input.LA(1)==KW_SCHEMA ) {
			input.consume();
			state.errorRecovery=false;
			state.failed=false;
		}
		else {
			if (state.backtracking>0) {state.failed=true; return;}
			MismatchedSetException mse = new MismatchedSetException(null,input);
			throw mse;
		}
		}

	}
	// $ANTLR end synpred12_HiveParser

	// $ANTLR start synpred13_HiveParser
	public final void synpred13_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1586:7: ( KW_DATABASE | KW_SCHEMA )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:
		{
		if ( input.LA(1)==KW_DATABASE||input.LA(1)==KW_SCHEMA ) {
			input.consume();
			state.errorRecovery=false;
			state.failed=false;
		}
		else {
			if (state.backtracking>0) {state.failed=true; return;}
			MismatchedSetException mse = new MismatchedSetException(null,input);
			throw mse;
		}
		}

	}
	// $ANTLR end synpred13_HiveParser

	// $ANTLR start synpred14_HiveParser
	public final void synpred14_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1702:5: ( KW_ALL )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1702:6: KW_ALL
		{
		match(input,KW_ALL,FOLLOW_KW_ALL_in_synpred14_HiveParser9164); if (state.failed) return;

		}

	}
	// $ANTLR end synpred14_HiveParser

	// $ANTLR start synpred15_HiveParser
	public final void synpred15_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1704:5: ( KW_NONE )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1704:6: KW_NONE
		{
		match(input,KW_NONE,FOLLOW_KW_NONE_in_synpred15_HiveParser9195); if (state.failed) return;

		}

	}
	// $ANTLR end synpred15_HiveParser

	// $ANTLR start synpred16_HiveParser
	public final void synpred16_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1728:7: ( KW_ALL )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1728:8: KW_ALL
		{
		match(input,KW_ALL,FOLLOW_KW_ALL_in_synpred16_HiveParser9369); if (state.failed) return;

		}

	}
	// $ANTLR end synpred16_HiveParser

	// $ANTLR start synpred17_HiveParser
	public final void synpred17_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1995:117: ( storedAsDirs )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:1995:118: storedAsDirs
		{
		pushFollow(FOLLOW_storedAsDirs_in_synpred17_HiveParser11646);
		storedAsDirs();
		state._fsp--;
		if (state.failed) return;

		}

	}
	// $ANTLR end synpred17_HiveParser

	// $ANTLR start synpred18_HiveParser
	public final void synpred18_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:2126:7: ( KW_STORED KW_AS KW_INPUTFORMAT )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:2126:8: KW_STORED KW_AS KW_INPUTFORMAT
		{
		match(input,KW_STORED,FOLLOW_KW_STORED_in_synpred18_HiveParser12627); if (state.failed) return;

		match(input,KW_AS,FOLLOW_KW_AS_in_synpred18_HiveParser12629); if (state.failed) return;

		match(input,KW_INPUTFORMAT,FOLLOW_KW_INPUTFORMAT_in_synpred18_HiveParser12631); if (state.failed) return;

		}

	}
	// $ANTLR end synpred18_HiveParser

	// $ANTLR start synpred19_HiveParser
	public final void synpred19_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:2177:25: ( KW_ELEM_TYPE )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:2177:26: KW_ELEM_TYPE
		{
		match(input,KW_ELEM_TYPE,FOLLOW_KW_ELEM_TYPE_in_synpred19_HiveParser13069); if (state.failed) return;

		}

	}
	// $ANTLR end synpred19_HiveParser

	// $ANTLR start synpred20_HiveParser
	public final void synpred20_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:2177:58: ( KW_KEY_TYPE )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:2177:59: KW_KEY_TYPE
		{
		match(input,KW_KEY_TYPE,FOLLOW_KW_KEY_TYPE_in_synpred20_HiveParser13079); if (state.failed) return;

		}

	}
	// $ANTLR end synpred20_HiveParser

	// $ANTLR start synpred21_HiveParser
	public final void synpred21_HiveParser_fragment() throws RecognitionException {
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:2177:89: ( KW_VALUE_TYPE )
		// org/apache/hadoop/hive/ql/parse/HiveParser.g:2177:90: KW_VALUE_TYPE
		{
		match(input,KW_VALUE_TYPE,FOLLOW_KW_VALUE_TYPE_in_synpred21_HiveParser13089); if (state.failed) return;

		}

	}
	// $ANTLR end synpred21_HiveParser

	// Delegated rules
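	// HiveParser is a composite grammar; the one-line wrappers below simply
	// forward to the imported sub-parsers (gSelectClauseParser,
	// gFromClauseParser, gIdentifiersParser, gResourcePlanParser) so that
	// every rule can be invoked through this class.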
	public HiveParser_IdentifiersParser.expressions_return expressions() throws RecognitionException { return gIdentifiersParser.expressions(); }

	public HiveParser_ResourcePlanParser.withReplace_return withReplace() throws RecognitionException { return gResourcePlanParser.withReplace(); }

	public HiveParser_ResourcePlanParser.triggerAndExpression_return triggerAndExpression() throws RecognitionException { return gResourcePlanParser.triggerAndExpression(); }

	public HiveParser_ResourcePlanParser.alterMappingStatement_return alterMappingStatement() throws RecognitionException { return gResourcePlanParser.alterMappingStatement(); }

	public HiveParser_ResourcePlanParser.triggerExpressionStandalone_return triggerExpressionStandalone() throws RecognitionException { return gResourcePlanParser.triggerExpressionStandalone(); }

	public HiveParser_IdentifiersParser.groupingExpressionSingle_return groupingExpressionSingle() throws RecognitionException { return gIdentifiersParser.groupingExpressionSingle(); }

	public HiveParser_IdentifiersParser.precedenceBitwiseOrExpression_return precedenceBitwiseOrExpression() throws RecognitionException { return gIdentifiersParser.precedenceBitwiseOrExpression(); }

	public HiveParser_IdentifiersParser.caseExpression_return caseExpression() throws RecognitionException { return gIdentifiersParser.caseExpression(); }

	public HiveParser_IdentifiersParser.tableOrPartition_return tableOrPartition() throws RecognitionException { return gIdentifiersParser.tableOrPartition(); }

	public HiveParser_ResourcePlanParser.createTriggerStatement_return createTriggerStatement() throws RecognitionException { return gResourcePlanParser.createTriggerStatement(); }

	public HiveParser_ResourcePlanParser.resourcePlanDdlStatements_return resourcePlanDdlStatements() throws RecognitionException { return gResourcePlanParser.resourcePlanDdlStatements(); }

	public HiveParser_FromClauseParser.tableSample_return tableSample() throws RecognitionException { return gFromClauseParser.tableSample(); }

	public HiveParser_FromClauseParser.valueRowConstructor_return valueRowConstructor() throws RecognitionException { return gFromClauseParser.valueRowConstructor(); }

	public HiveParser_ResourcePlanParser.poolAssign_return poolAssign() throws RecognitionException { return gResourcePlanParser.poolAssign(); }

	public HiveParser_FromClauseParser.partitioningSpec_return partitioningSpec() throws RecognitionException { return gFromClauseParser.partitioningSpec(); }

	public HiveParser_ResourcePlanParser.rpAssignList_return rpAssignList() throws RecognitionException { return gResourcePlanParser.rpAssignList(); }

	public HiveParser_IdentifiersParser.intervalExpression_return intervalExpression() throws RecognitionException { return gIdentifiersParser.intervalExpression(); }

	public HiveParser_IdentifiersParser.precedencePlusExpression_return precedencePlusExpression() throws RecognitionException { return gIdentifiersParser.precedencePlusExpression(); }

	public HiveParser_ResourcePlanParser.unmanaged_return unmanaged() throws RecognitionException { return gResourcePlanParser.unmanaged(); }

	public HiveParser_ResourcePlanParser.alterPoolStatement_return alterPoolStatement() throws RecognitionException { return gResourcePlanParser.alterPoolStatement(); }

	public HiveParser_IdentifiersParser.whenExpression_return whenExpression() throws RecognitionException { return gIdentifiersParser.whenExpression(); }

	public HiveParser_FromClauseParser.viewName_return viewName() throws RecognitionException { return gFromClauseParser.viewName(); }

	public HiveParser_SelectClauseParser.selectClause_return selectClause() throws RecognitionException { return gSelectClauseParser.selectClause(); }

	public HiveParser_IdentifiersParser.precedenceUnaryPrefixExpression_return precedenceUnaryPrefixExpression() throws RecognitionException { return gIdentifiersParser.precedenceUnaryPrefixExpression(); }

	public HiveParser_FromClauseParser.atomjoinSource_return atomjoinSource() throws RecognitionException { return gFromClauseParser.atomjoinSource(); }

	public HiveParser_FromClauseParser.subQuerySource_return subQuerySource() throws RecognitionException { return gFromClauseParser.subQuerySource(); }

	public HiveParser_IdentifiersParser.function_return function() throws RecognitionException { return gIdentifiersParser.function(); }

	public HiveParser_IdentifiersParser.functionName_return functionName() throws RecognitionException { return gIdentifiersParser.functionName(); }

	public HiveParser_FromClauseParser.virtualTableSource_return virtualTableSource() throws RecognitionException { return gFromClauseParser.virtualTableSource(); }

	public HiveParser_IdentifiersParser.partitionSpec_return partitionSpec() throws RecognitionException { return gIdentifiersParser.partitionSpec(); }

	public HiveParser_ResourcePlanParser.createResourcePlanStatement_return createResourcePlanStatement() throws RecognitionException { return gResourcePlanParser.createResourcePlanStatement(); }

	public HiveParser_IdentifiersParser.charSetStringLiteral_return charSetStringLiteral() throws RecognitionException { return gIdentifiersParser.charSetStringLiteral(); }

	public HiveParser_SelectClauseParser.trfmClause_return trfmClause() throws RecognitionException { return gSelectClauseParser.trfmClause(); }

	public HiveParser_IdentifiersParser.castExpression_return castExpression() throws RecognitionException { return gIdentifiersParser.castExpression(); }

	public HiveParser_SelectClauseParser.selectList_return selectList() throws RecognitionException { return gSelectClauseParser.selectList(); }

	public HiveParser_IdentifiersParser.groupingSetExpression_return groupingSetExpression() throws RecognitionException { return gIdentifiersParser.groupingSetExpression(); }

	public HiveParser_IdentifiersParser.precedenceSimilarExpressionPart_return precedenceSimilarExpressionPart(CommonTree t) throws RecognitionException { return gIdentifiersParser.precedenceSimilarExpressionPart(t); }

	public HiveParser_IdentifiersParser.precedenceSimilarExpression_return precedenceSimilarExpression() throws RecognitionException { return gIdentifiersParser.precedenceSimilarExpression(); }

	public HiveParser_IdentifiersParser.precedenceNotExpression_return precedenceNotExpression() throws RecognitionException { return gIdentifiersParser.precedenceNotExpression(); }

	public HiveParser_ResourcePlanParser.globalWmStatement_return globalWmStatement() throws RecognitionException { return gResourcePlanParser.globalWmStatement(); }

	public HiveParser_IdentifiersParser.intervalValue_return intervalValue() throws RecognitionException { return gIdentifiersParser.intervalValue(); }

	public HiveParser_IdentifiersParser.precedenceBitwiseXorExpression_return precedenceBitwiseXorExpression() throws RecognitionException { return gIdentifiersParser.precedenceBitwiseXorExpression(); }

	public HiveParser_IdentifiersParser.groupingSetExpressionMultiple_return groupingSetExpressionMultiple() throws RecognitionException { return gIdentifiersParser.groupingSetExpressionMultiple(); }

	public HiveParser_SelectClauseParser.window_clause_return window_clause() throws RecognitionException { return gSelectClauseParser.window_clause(); }

	public HiveParser_IdentifiersParser.sortByClause_return sortByClause() throws RecognitionException { return gIdentifiersParser.sortByClause(); }

	public HiveParser_IdentifiersParser.groupby_expression_return groupby_expression() throws RecognitionException { return gIdentifiersParser.groupby_expression(); }

	public HiveParser_SelectClauseParser.window_frame_return window_frame() throws RecognitionException { return gSelectClauseParser.window_frame(); }

	public HiveParser_IdentifiersParser.extractExpression_return extractExpression() throws RecognitionException { return gIdentifiersParser.extractExpression(); }

	public HiveParser_ResourcePlanParser.activate_return activate() throws RecognitionException { return gResourcePlanParser.activate(); }

	public HiveParser_IdentifiersParser.expression_return expression() throws RecognitionException { return gIdentifiersParser.expression(); }

	public HiveParser_IdentifiersParser.sysFuncNames_return sysFuncNames() throws RecognitionException { return gIdentifiersParser.sysFuncNames(); }

	public HiveParser_IdentifiersParser.columnRefOrderNotInParenthesis_return columnRefOrderNotInParenthesis() throws RecognitionException { return gIdentifiersParser.columnRefOrderNotInParenthesis(); }

	public HiveParser_IdentifiersParser.precedenceOrOperator_return precedenceOrOperator() throws RecognitionException { return gIdentifiersParser.precedenceOrOperator(); }

	public HiveParser_IdentifiersParser.precedenceEqualExpression_return precedenceEqualExpression() throws RecognitionException { return gIdentifiersParser.precedenceEqualExpression(); }

	public HiveParser_ResourcePlanParser.dropMappingStatement_return dropMappingStatement() throws RecognitionException { return gResourcePlanParser.dropMappingStatement(); }

	public HiveParser_IdentifiersParser.subQueryExpression_return subQueryExpression() throws RecognitionException { return gIdentifiersParser.subQueryExpression(); }

	public HiveParser_ResourcePlanParser.enable_return enable() throws RecognitionException { return gResourcePlanParser.enable(); }

	public HiveParser_FromClauseParser.expressionList_return expressionList() throws RecognitionException { return gFromClauseParser.expressionList(); }

	public HiveParser_IdentifiersParser.descFuncNames_return descFuncNames() throws RecognitionException { return gIdentifiersParser.descFuncNames(); }

	public HiveParser_IdentifiersParser.intervalLiteral_return intervalLiteral() throws RecognitionException { return gIdentifiersParser.intervalLiteral(); }

	public HiveParser_SelectClauseParser.selectExpressionList_return selectExpressionList() throws RecognitionException { return gSelectClauseParser.selectExpressionList(); }

	public HiveParser_IdentifiersParser.stringLiteralSequence_return stringLiteralSequence() throws RecognitionException { return gIdentifiersParser.stringLiteralSequence(); }

	public HiveParser_ResourcePlanParser.dropResourcePlanStatement_return dropResourcePlanStatement() throws RecognitionException { return gResourcePlanParser.dropResourcePlanStatement(); }

	public HiveParser_IdentifiersParser.dateLiteral_return dateLiteral() throws RecognitionException { return gIdentifiersParser.dateLiteral(); }

	public HiveParser_IdentifiersParser.dropPartitionOperator_return dropPartitionOperator() throws RecognitionException { return gIdentifiersParser.dropPartitionOperator(); }

	public HiveParser_ResourcePlanParser.triggerOrExpression_return triggerOrExpression() throws RecognitionException { return gResourcePlanParser.triggerOrExpression(); }

	public HiveParser_FromClauseParser.aliasList_return aliasList() throws RecognitionException { return gFromClauseParser.aliasList(); }

	public HiveParser_SelectClauseParser.window_specification_return window_specification() throws RecognitionException { return gSelectClauseParser.window_specification(); }

	public HiveParser_FromClauseParser.joinToken_return joinToken() throws RecognitionException { return gFromClauseParser.joinToken(); }

	public HiveParser_SelectClauseParser.window_defn_return window_defn() throws RecognitionException { return gSelectClauseParser.window_defn(); }

	public HiveParser_FromClauseParser.tableName_return tableName() throws RecognitionException { return gFromClauseParser.tableName(); }

	public HiveParser_FromClauseParser.valuesTableConstructor_return valuesTableConstructor() throws RecognitionException { return gFromClauseParser.valuesTableConstructor(); }

	public HiveParser_IdentifiersParser.isCondition_return isCondition() throws RecognitionException { return gIdentifiersParser.isCondition(); }

	public HiveParser_FromClauseParser.searchCondition_return searchCondition() throws RecognitionException { return gFromClauseParser.searchCondition(); }

	public HiveParser_IdentifiersParser.timestampLocalTZLiteral_return timestampLocalTZLiteral() throws RecognitionException { return gIdentifiersParser.timestampLocalTZLiteral(); }

	public HiveParser_FromClauseParser.joinSourcePart_return joinSourcePart() throws RecognitionException { return gFromClauseParser.joinSourcePart(); }

	public HiveParser_FromClauseParser.uniqueJoinSource_return uniqueJoinSource() throws RecognitionException { return gFromClauseParser.uniqueJoinSource(); }

	public HiveParser_IdentifiersParser.havingClause_return havingClause() throws RecognitionException { return gIdentifiersParser.havingClause(); }

	public HiveParser_IdentifiersParser.havingCondition_return havingCondition() throws RecognitionException { return gIdentifiersParser.havingCondition(); }

	public HiveParser_IdentifiersParser.timestampLiteral_return timestampLiteral() throws RecognitionException { return gIdentifiersParser.timestampLiteral(); }

	public HiveParser_IdentifiersParser.precedenceStarOperator_return precedenceStarOperator() throws RecognitionException { return gIdentifiersParser.precedenceStarOperator(); }

	public HiveParser_IdentifiersParser.dropPartitionSpec_return dropPartitionSpec() throws RecognitionException { return gIdentifiersParser.dropPartitionSpec(); }

	public HiveParser_IdentifiersParser.expressionsNotInParenthesis_return expressionsNotInParenthesis(boolean isStruct, boolean forceStruct) throws RecognitionException { return gIdentifiersParser.expressionsNotInParenthesis(isStruct, forceStruct); }

	public HiveParser_IdentifiersParser.clusterByClause_return clusterByClause() throws RecognitionException { return gIdentifiersParser.clusterByClause(); }

	public HiveParser_ResourcePlanParser.disable_return disable() throws RecognitionException { return gResourcePlanParser.disable(); }

	public HiveParser_IdentifiersParser.precedenceSimilarOperator_return precedenceSimilarOperator() throws RecognitionException { return gIdentifiersParser.precedenceSimilarOperator(); }

	public HiveParser_IdentifiersParser.rollupOldSyntax_return rollupOldSyntax() throws RecognitionException { return gIdentifiersParser.rollupOldSyntax(); }

	public HiveParser_IdentifiersParser.precedenceSimilarExpressionAtom_return precedenceSimilarExpressionAtom(CommonTree t) throws RecognitionException { return gIdentifiersParser.precedenceSimilarExpressionAtom(t); }

	public HiveParser_FromClauseParser.uniqueJoinToken_return uniqueJoinToken() throws RecognitionException { return gFromClauseParser.uniqueJoinToken(); }

	public HiveParser_SelectClauseParser.window_range_expression_return window_range_expression() throws RecognitionException { return gSelectClauseParser.window_range_expression(); }

	public HiveParser_ResourcePlanParser.replaceResourcePlanStatement_return replaceResourcePlanStatement() throws RecognitionException { return gResourcePlanParser.replaceResourcePlanStatement(); }

	public HiveParser_FromClauseParser.tableAllColumns_return tableAllColumns() throws RecognitionException { return gFromClauseParser.tableAllColumns(); }

	public HiveParser_IdentifiersParser.expressionPart_return expressionPart(CommonTree t, boolean isStruct) throws RecognitionException { return gIdentifiersParser.expressionPart(t, isStruct); }

	public HiveParser_IdentifiersParser.precedenceSimilarExpressionMain_return precedenceSimilarExpressionMain() throws RecognitionException { return gIdentifiersParser.precedenceSimilarExpressionMain(); }

	public HiveParser_IdentifiersParser.partitionByClause_return partitionByClause() throws RecognitionException { return gIdentifiersParser.partitionByClause(); }

	public HiveParser_FromClauseParser.valuesClause_return valuesClause() throws RecognitionException { return gFromClauseParser.valuesClause(); }

	public HiveParser_FromClauseParser.uniqueJoinExpr_return uniqueJoinExpr() throws RecognitionException { return gFromClauseParser.uniqueJoinExpr(); }

	public HiveParser_ResourcePlanParser.poolPath_return poolPath() throws RecognitionException { return gResourcePlanParser.poolPath(); }

	public HiveParser_IdentifiersParser.precedenceFieldExpression_return precedenceFieldExpression() throws RecognitionException { return gIdentifiersParser.precedenceFieldExpression(); }

	public HiveParser_FromClauseParser.joinSource_return joinSource() throws RecognitionException { return gFromClauseParser.joinSource(); }

	public HiveParser_IdentifiersParser.precedenceConcatenateOperator_return precedenceConcatenateOperator() throws RecognitionException { return gIdentifiersParser.precedenceConcatenateOperator(); }

	public HiveParser_IdentifiersParser.precedenceBitwiseOrOperator_return precedenceBitwiseOrOperator() throws RecognitionException { return gIdentifiersParser.precedenceBitwiseOrOperator(); }

	public HiveParser_IdentifiersParser.precedenceBitwiseXorOperator_return precedenceBitwiseXorOperator() throws RecognitionException { return gIdentifiersParser.precedenceBitwiseXorOperator(); }

	public HiveParser_ResourcePlanParser.triggerActionExpression_return triggerActionExpression() throws RecognitionException { return gResourcePlanParser.triggerActionExpression(); }

	public HiveParser_IdentifiersParser.precedenceOrExpression_return precedenceOrExpression() throws RecognitionException { return gIdentifiersParser.precedenceOrExpression(); }

	public HiveParser_ResourcePlanParser.rpUnassignList_return rpUnassignList() throws RecognitionException { return gResourcePlanParser.rpUnassignList(); }

	public HiveParser_IdentifiersParser.atomExpression_return atomExpression() throws RecognitionException { return gIdentifiersParser.atomExpression(); }

	public HiveParser_ResourcePlanParser.triggerExpression_return triggerExpression() throws RecognitionException { return gResourcePlanParser.triggerExpression(); }

	public HiveParser_IdentifiersParser.groupByEmpty_return groupByEmpty() throws RecognitionException { return gIdentifiersParser.groupByEmpty(); }

	public HiveParser_IdentifiersParser.precedenceAndExpression_return precedenceAndExpression() throws RecognitionException { return gIdentifiersParser.precedenceAndExpression(); }

	public HiveParser_FromClauseParser.lateralView_return lateralView() throws RecognitionException { return gFromClauseParser.lateralView(); }

	public HiveParser_FromClauseParser.splitSample_return splitSample() throws RecognitionException { return gFromClauseParser.splitSample(); }

	public HiveParser_ResourcePlanParser.triggerLiteral_return triggerLiteral() throws RecognitionException { return gResourcePlanParser.triggerLiteral(); }

	public HiveParser_ResourcePlanParser.rpAssign_return rpAssign() throws RecognitionException { return gResourcePlanParser.rpAssign(); }

	public HiveParser_FromClauseParser.partitionTableFunctionSource_return partitionTableFunctionSource() throws RecognitionException { return gFromClauseParser.partitionTableFunctionSource(); }

	public HiveParser_IdentifiersParser.precedenceUnaryOperator_return precedenceUnaryOperator() throws RecognitionException { return gIdentifiersParser.precedenceUnaryOperator(); }

	public HiveParser_IdentifiersParser.booleanValueTok_return booleanValueTok() throws RecognitionException { return gIdentifiersParser.booleanValueTok(); }

	public HiveParser_IdentifiersParser.precedenceAndOperator_return precedenceAndOperator() throws RecognitionException { return gIdentifiersParser.precedenceAndOperator(); }

	public HiveParser_IdentifiersParser.precedenceAmpersandOperator_return precedenceAmpersandOperator() throws RecognitionException { return gIdentifiersParser.precedenceAmpersandOperator(); }

	public HiveParser_SelectClauseParser.selectTrfmClause_return selectTrfmClause() throws RecognitionException { return gSelectClauseParser.selectTrfmClause(); }

	public HiveParser_IdentifiersParser.precedenceDistinctOperator_return precedenceDistinctOperator() throws RecognitionException { return gIdentifiersParser.precedenceDistinctOperator(); }

	public HiveParser_IdentifiersParser.precedenceEqualOperator_return precedenceEqualOperator() throws RecognitionException { return gIdentifiersParser.precedenceEqualOperator(); }

	public HiveParser_ResourcePlanParser.triggerAtomExpression_return triggerAtomExpression() throws RecognitionException { return gResourcePlanParser.triggerAtomExpression(); }

	public HiveParser_IdentifiersParser.sql11ReservedKeywordsUsedAsFunctionName_return sql11ReservedKeywordsUsedAsFunctionName() throws RecognitionException { return gIdentifiersParser.sql11ReservedKeywordsUsedAsFunctionName(); }

	public HiveParser_ResourcePlanParser.triggerActionExpressionStandalone_return triggerActionExpressionStandalone() throws RecognitionException { return gResourcePlanParser.triggerActionExpressionStandalone(); }

	public HiveParser_SelectClauseParser.window_frame_boundary_return window_frame_boundary() throws RecognitionException { return gSelectClauseParser.window_frame_boundary(); }

	public HiveParser_ResourcePlanParser.alterTriggerStatement_return alterTriggerStatement() throws RecognitionException { return gResourcePlanParser.alterTriggerStatement(); }

	public HiveParser_ResourcePlanParser.alterResourcePlanStatement_return alterResourcePlanStatement() throws RecognitionException { return gResourcePlanParser.alterResourcePlanStatement(); }

	public HiveParser_IdentifiersParser.precedenceSimilarExpressionIn_return precedenceSimilarExpressionIn(CommonTree t) throws RecognitionException { return gIdentifiersParser.precedenceSimilarExpressionIn(t); }

	public HiveParser_ResourcePlanParser.poolAssignList_return poolAssignList() throws RecognitionException { return gResourcePlanParser.poolAssignList(); }

	public HiveParser_SelectClauseParser.selectItem_return selectItem() throws RecognitionException { return gSelectClauseParser.selectItem(); }

	public HiveParser_ResourcePlanParser.dropTriggerStatement_return dropTriggerStatement() throws RecognitionException { return gResourcePlanParser.dropTriggerStatement(); }

	public HiveParser_IdentifiersParser.orderByClause_return orderByClause() throws RecognitionException { return gIdentifiersParser.orderByClause(); }

	public HiveParser_IdentifiersParser.timeQualifiers_return timeQualifiers() throws RecognitionException { return gIdentifiersParser.timeQualifiers(); }

	public HiveParser_FromClauseParser.tableOrColumn_return tableOrColumn() throws RecognitionException { return gFromClauseParser.tableOrColumn(); }

	public HiveParser_IdentifiersParser.floorDateQualifiers_return floorDateQualifiers() throws RecognitionException { return gIdentifiersParser.floorDateQualifiers(); }

	public HiveParser_IdentifiersParser.precedenceUnarySuffixExpression_return precedenceUnarySuffixExpression() throws RecognitionException { return gIdentifiersParser.precedenceUnarySuffixExpression(); }

	public HiveParser_IdentifiersParser.distributeByClause_return distributeByClause() throws RecognitionException { return gIdentifiersParser.distributeByClause(); }

	public HiveParser_IdentifiersParser.precedenceSimilarExpressionPartNot_return precedenceSimilarExpressionPartNot(CommonTree t) throws RecognitionException { return gIdentifiersParser.precedenceSimilarExpressionPartNot(t); }

	public HiveParser_IdentifiersParser.expressionsInParenthesis_return expressionsInParenthesis(boolean isStruct, boolean forceStruct) throws RecognitionException { return gIdentifiersParser.expressionsInParenthesis(isStruct, forceStruct); }

	public HiveParser_IdentifiersParser.groupByClause_return groupByClause() throws RecognitionException { return gIdentifiersParser.groupByClause(); }

	public HiveParser_FromClauseParser.tableAlias_return tableAlias() throws RecognitionException { return gFromClauseParser.tableAlias(); }

	public HiveParser_IdentifiersParser.precedenceNotOperator_return precedenceNotOperator() throws RecognitionException { return gIdentifiersParser.precedenceNotOperator(); }

	public HiveParser_SelectClauseParser.window_frame_start_boundary_return window_frame_start_boundary() throws RecognitionException { return gSelectClauseParser.window_frame_start_boundary(); }

	public HiveParser_ResourcePlanParser.createMappingStatement_return createMappingStatement() throws RecognitionException { return gResourcePlanParser.createMappingStatement(); }

	public HiveParser_IdentifiersParser.functionIdentifier_return functionIdentifier() throws RecognitionException { return gIdentifiersParser.functionIdentifier(); }

	public HiveParser_FromClauseParser.tableBucketSample_return tableBucketSample() throws RecognitionException { return gFromClauseParser.tableBucketSample(); }

	public HiveParser_IdentifiersParser.nonReserved_return nonReserved() throws RecognitionException { return gIdentifiersParser.nonReserved(); }

	public HiveParser_FromClauseParser.uniqueJoinTableSource_return uniqueJoinTableSource() throws RecognitionException { return gFromClauseParser.uniqueJoinTableSource(); }

	public HiveParser_ResourcePlanParser.rpUnassign_return rpUnassign() throws RecognitionException { return gResourcePlanParser.rpUnassign(); }

	public HiveParser_IdentifiersParser.dropPartitionVal_return dropPartitionVal() throws RecognitionException { return gIdentifiersParser.dropPartitionVal(); }

	public HiveParser_FromClauseParser.whereClause_return whereClause() throws RecognitionException { return gFromClauseParser.whereClause(); }

	public HiveParser_ResourcePlanParser.dropPoolStatement_return dropPoolStatement() throws RecognitionException { return gResourcePlanParser.dropPoolStatement(); }

	public HiveParser_FromClauseParser.fromSource_return fromSource() throws RecognitionException { return gFromClauseParser.fromSource(); }

	public HiveParser_FromClauseParser.tableSource_return tableSource() throws RecognitionException { return gFromClauseParser.tableSource(); }

	public HiveParser_IdentifiersParser.constant_return constant() throws RecognitionException { return gIdentifiersParser.constant(); }

	public HiveParser_IdentifiersParser.rollupStandard_return rollupStandard() throws RecognitionException { return gIdentifiersParser.rollupStandard(); }

	public HiveParser_FromClauseParser.fromClause_return fromClause() throws RecognitionException { return gFromClauseParser.fromClause(); }

	public HiveParser_SelectClauseParser.selectExpression_return selectExpression() throws RecognitionException { return gSelectClauseParser.selectExpression(); }

	public HiveParser_IdentifiersParser.precedencePlusOperator_return precedencePlusOperator() throws RecognitionException { return gIdentifiersParser.precedencePlusOperator(); }

	public HiveParser_IdentifiersParser.partitionVal_return partitionVal() throws RecognitionException { return gIdentifiersParser.partitionVal(); }

	public HiveParser_ResourcePlanParser.comparisionOperator_return comparisionOperator() throws RecognitionException { return gResourcePlanParser.comparisionOperator(); } // "comparision" spelling follows the rule name in the grammar

	public HiveParser_IdentifiersParser.principalIdentifier_return principalIdentifier() throws RecognitionException { return gIdentifiersParser.principalIdentifier(); }

	public HiveParser_IdentifiersParser.precedenceRegexpOperator_return precedenceRegexpOperator() throws RecognitionException { return gIdentifiersParser.precedenceRegexpOperator(); }

	public HiveParser_IdentifiersParser.intervalQualifiers_return intervalQualifiers() throws RecognitionException { return gIdentifiersParser.intervalQualifiers(); }

	public HiveParser_IdentifiersParser.precedenceConcatenateExpression_return precedenceConcatenateExpression() throws RecognitionException { return gIdentifiersParser.precedenceConcatenateExpression(); }

	public HiveParser_IdentifiersParser.precedenceStarExpression_return precedenceStarExpression() throws RecognitionException { return gIdentifiersParser.precedenceStarExpression(); }

	public HiveParser_IdentifiersParser.identifier_return identifier() throws RecognitionException { return gIdentifiersParser.identifier(); }

	public HiveParser_IdentifiersParser.columnRefOrderInParenthesis_return columnRefOrderInParenthesis() throws RecognitionException { return gIdentifiersParser.columnRefOrderInParenthesis(); }

	public HiveParser_SelectClauseParser.window_value_expression_return window_value_expression() throws RecognitionException { return gSelectClauseParser.window_value_expression(); }

	public HiveParser_IdentifiersParser.floorExpression_return floorExpression() throws RecognitionException { return gIdentifiersParser.floorExpression(); }

	public HiveParser_IdentifiersParser.booleanValue_return booleanValue() throws RecognitionException { return gIdentifiersParser.booleanValue(); }

	public HiveParser_IdentifiersParser.precedenceAmpersandExpression_return precedenceAmpersandExpression() throws RecognitionException { return gIdentifiersParser.precedenceAmpersandExpression(); }

	public HiveParser_ResourcePlanParser.createPoolStatement_return createPoolStatement() throws RecognitionException { return gResourcePlanParser.createPoolStatement(); }

	public HiveParser_FromClauseParser.partitionedTableFunction_return partitionedTableFunction() throws RecognitionException { return gFromClauseParser.partitionedTableFunction(); }

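	// Each synpredN_HiveParser() wrapper below drives its fragment under
	// backtracking: it marks the input, runs the fragment (failures only set
	// state.failed), rewinds to the mark, and returns whether the
	// speculative match succeeded.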
	public final boolean synpred18_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred18_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred21_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred21_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred7_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred7_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred11_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred11_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred15_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred15_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred13_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred13_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred10_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred10_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred8_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred8_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred4_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred4_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred2_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred2_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred6_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred6_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred19_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred19_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred14_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred14_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred17_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred17_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred20_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred20_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred12_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred12_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred3_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred3_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred9_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred9_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred16_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred16_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred5_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred5_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}
	public final boolean synpred1_HiveParser() {
		state.backtracking++;
		int start = input.mark();
		try {
			synpred1_HiveParser_fragment(); // can never throw exception
		} catch (RecognitionException re) {
			System.err.println("impossible: "+re);
		}
		boolean success = !state.failed;
		input.rewind(start);
		state.backtracking--;
		state.failed=false;
		return success;
	}


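	/*
	 * Cyclic lookahead DFAs. ANTLR emits one DFA per decision that needs
	 * arbitrary lookahead; at parse time the decision point calls
	 * dfaN.predict(input), which walks the packed tables below and returns
	 * the number of the predicted alternative.
	 */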
	protected DFA2 dfa2 = new DFA2(this);
	protected DFA28 dfa28 = new DFA28(this);
	protected DFA226 dfa226 = new DFA226(this);
	protected DFA270 dfa270 = new DFA270(this);
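	/*
	 * The DFA tables are run-length encoded as string literals so the class
	 * initializer stays under Java's 64KB-per-method bytecode limit.
	 * Characters come in (count, value) pairs, which DFA.unpackEncodedString
	 * expands; for example (a decoding sketch, not generated API):
	 *
	 *   "\134\uffff"         -> 92 entries of -1, i.e. "no transition"
	 *   "\1\33\25\uffff..."  -> { 27, then 21 entries of -1, ... }
	 *
	 * eot/eof hold per-state end-of-token and end-of-file targets, min/max
	 * bound each state's transition row, accept maps accepting states to
	 * alternatives, and special flags states needing runtime predicates.
	 */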
	static final String DFA2_eotS =
		"\134\uffff";
	static final String DFA2_eofS =
		"\134\uffff";
	static final String DFA2_minS =
		"\1\33\25\uffff\1\33\105\uffff";
	static final String DFA2_maxS =
		"\1\u016a\25\uffff\1\u016a\105\uffff";
	static final String DFA2_acceptS =
		"\1\uffff\1\2\45\uffff\1\1\64\uffff";
	static final String DFA2_specialS =
		"\134\uffff}>";
	static final String[] DFA2_transitionS = {
			"\1\1\7\uffff\1\1\1\26\7\uffff\1\47\30\uffff\1\1\7\uffff\1\1\21\uffff"+
			"\1\1\1\uffff\1\47\2\1\3\uffff\1\1\5\uffff\1\1\3\uffff\1\1\10\uffff\1"+
			"\1\1\uffff\1\47\16\uffff\1\47\1\1\3\uffff\1\1\6\uffff\1\1\7\uffff\1\1"+
			"\14\uffff\1\1\10\uffff\1\1\2\uffff\1\1\1\uffff\1\47\3\uffff\1\1\4\uffff"+
			"\1\1\6\uffff\1\1\57\uffff\1\1\2\uffff\1\1\2\uffff\1\47\1\uffff\2\1\3"+
			"\uffff\1\1\5\uffff\1\1\7\uffff\1\1\4\uffff\1\1\2\uffff\1\1\7\uffff\1"+
			"\1\32\uffff\1\1\7\uffff\1\1\3\uffff\1\1\1\uffff\1\1\10\uffff\1\47\10"+
			"\uffff\1\1\11\uffff\1\1",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"\1\47\7\uffff\2\47\7\uffff\1\47\30\uffff\1\47\7\uffff\1\47\21\uffff"+
			"\1\47\1\uffff\3\47\3\uffff\1\47\5\uffff\1\47\3\uffff\1\47\10\uffff\1"+
			"\47\1\uffff\1\47\16\uffff\2\47\3\uffff\1\47\6\uffff\1\47\7\uffff\1\47"+
			"\14\uffff\1\47\10\uffff\1\47\2\uffff\1\47\1\uffff\1\47\3\uffff\1\47\4"+
			"\uffff\1\47\6\uffff\1\47\57\uffff\1\47\2\uffff\1\47\2\uffff\1\47\1\uffff"+
			"\2\47\3\uffff\1\47\5\uffff\1\47\7\uffff\1\47\4\uffff\1\47\2\uffff\1\47"+
			"\7\uffff\1\47\10\uffff\1\1\21\uffff\1\47\7\uffff\1\47\3\uffff\1\47\1"+
			"\uffff\1\47\10\uffff\1\47\10\uffff\1\47\11\uffff\1\47",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			""
	};

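	/*
	 * Unpack the encoded strings, once at class load, into the short[] and
	 * char[] arrays the DFA base class actually consumes.
	 */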
	static final short[] DFA2_eot = DFA.unpackEncodedString(DFA2_eotS);
	static final short[] DFA2_eof = DFA.unpackEncodedString(DFA2_eofS);
	static final char[] DFA2_min = DFA.unpackEncodedStringToUnsignedChars(DFA2_minS);
	static final char[] DFA2_max = DFA.unpackEncodedStringToUnsignedChars(DFA2_maxS);
	static final short[] DFA2_accept = DFA.unpackEncodedString(DFA2_acceptS);
	static final short[] DFA2_special = DFA.unpackEncodedString(DFA2_specialS);
	static final short[][] DFA2_transition;

	static {
		int numStates = DFA2_transitionS.length;
		DFA2_transition = new short[numStates][];
		for (int i=0; i<numStates; i++) {
			DFA2_transition[i] = DFA.unpackEncodedString(DFA2_transitionS[i]);
		}
	}
	static final String[] DFA28_transitionS = {
			"\1\21\7\uffff\1\5\1\13\50\uffff\1\1\24\uffff\2\6\3\uffff\1\23\5\uffff"+
			"\1\3\3\uffff\1\23\36\uffff\1\16\33\uffff\1\22\13\uffff\1\14\21\uffff"+
			"\1\11\62\uffff\1\12\5\uffff\1\23\3\uffff\1\17\22\uffff\1\20\2\uffff\1"+
			"\10\42\uffff\1\4\7\uffff\1\15\5\uffff\1\2",
			"\1\23\55\uffff\1\34\47\uffff\1\36\20\uffff\1\42\2\uffff\1\23\60\uffff"+
			"\1\26\25\uffff\1\40\20\uffff\1\23\33\uffff\1\23\5\uffff\1\30\6\uffff"+
			"\1\34\33\uffff\1\36\3\uffff\1\27\13\uffff\1\23\20\uffff\1\23\10\uffff"+
			"\1\40",
			"",
			"\1\23\55\uffff\1\56\70\uffff\1\60\2\uffff\1\23\60\uffff\1\50\46\uffff"+
			"\1\23\33\uffff\1\23\5\uffff\1\52\6\uffff\1\56\33\uffff\1\46\3\uffff\1"+
			"\51\13\uffff\1\23\20\uffff\1\23\10\uffff\1\47",
			"",
			"\1\23\55\uffff\1\64\73\uffff\1\23\60\uffff\1\64\46\uffff\1\23\33\uffff"+
			"\1\23\14\uffff\1\64\33\uffff\1\64\17\uffff\1\23\20\uffff\1\23\10\uffff"+
			"\1\64",
			"",
			"",
			"\1\77\3\uffff\1\77\2\uffff\1\77\2\uffff\1\77\2\uffff\1\121\5\uffff\1"+
			"\77\70\uffff\1\77\1\115\50\uffff\1\77\10\uffff\1\77\40\uffff\1\77\12"+
			"\uffff\1\117\26\uffff\1\77\5\uffff\1\116\1\120\6\uffff\1\77\32\uffff"+
			"\2\77\1\uffff\1\77\12\uffff\1\77\34\uffff\1\77",
			"",
			"",
			"",
			"\1\125\u00bc\uffff\1\125\33\uffff\1\124",
			"\1\130\u00bc\uffff\1\130\33\uffff\1\127",
			"\1\144\1\uffff\6\144\1\132\1\144\1\133\1\144\3\uffff\1\144\2\uffff\1"+
			"\144\1\uffff\2\144\5\uffff\2\144\1\uffff\2\144\2\uffff\1\144\1\uffff"+
			"\5\144\1\uffff\2\144\1\uffff\4\144\2\uffff\1\144\1\135\6\uffff\1\144"+
			"\1\uffff\1\144\1\uffff\3\144\1\uffff\3\144\1\143\3\144\1\uffff\4\144"+
			"\1\uffff\2\144\1\uffff\1\144\1\136\2\144\1\uffff\1\144\1\uffff\2\144"+
			"\2\uffff\1\144\1\uffff\3\144\5\uffff\4\144\5\uffff\2\144\3\uffff\1\144"+
			"\4\uffff\2\144\3\uffff\2\144\1\uffff\3\144\1\142\5\uffff\3\144\1\uffff"+
			"\5\144\3\uffff\1\144\1\uffff\3\144\1\uffff\1\144\1\137\3\144\1\uffff"+
			"\1\144\1\uffff\4\144\1\uffff\1\144\1\uffff\2\144\1\uffff\2\144\1\uffff"+
			"\2\144\1\uffff\1\144\1\uffff\1\144\1\uffff\1\144\2\uffff\2\144\4\uffff"+
			"\2\144\1\uffff\2\144\1\uffff\3\144\1\uffff\4\144\4\uffff\1\144\1\uffff"+
			"\4\144\1\uffff\1\144\1\uffff\3\144\3\uffff\12\144\1\uffff\1\144\2\uffff"+
			"\2\144\4\uffff\4\144\1\140\4\144\1\uffff\3\144\1\141\1\144\1\uffff\4"+
			"\144\1\uffff\7\144\2\uffff\1\144\1\uffff\3\144\4\uffff\1\144\1\uffff"+
			"\3\144\4\uffff\1\144\1\uffff\1\144\1\uffff\1\144\2\uffff\4\144\1\134"+
			"\2\144\2\uffff\3\144\1\uffff\1\144\1\uffff\5\144\2\uffff\1\144\2\uffff"+
			"\5\144\76\uffff\1\144\44\uffff\1\144\56\uffff\1\144\3\uffff\1\144\56"+
			"\uffff\1\144\3\uffff\1\144\31\uffff\1\144\6\uffff\1\144\73\uffff\1\144",
			"\1\162\1\uffff\6\162\1\150\1\162\1\151\1\162\3\uffff\1\162\2\uffff\1"+
			"\162\1\uffff\2\162\5\uffff\2\162\1\uffff\2\162\2\uffff\1\162\1\uffff"+
			"\5\162\1\uffff\2\162\1\uffff\4\162\2\uffff\1\162\1\153\6\uffff\1\162"+
			"\1\uffff\1\162\1\uffff\3\162\1\uffff\3\162\1\161\3\162\1\uffff\4\162"+
			"\1\uffff\2\162\1\uffff\1\162\1\154\2\162\1\uffff\1\162\1\uffff\2\162"+
			"\2\uffff\1\162\1\uffff\3\162\5\uffff\4\162\5\uffff\2\162\3\uffff\1\162"+
			"\1\147\3\uffff\2\162\3\uffff\2\162\1\uffff\3\162\1\160\5\uffff\3\162"+
			"\1\uffff\5\162\3\uffff\1\162\1\uffff\3\162\1\uffff\1\162\1\155\3\162"+
			"\1\uffff\1\162\1\uffff\4\162\1\uffff\1\162\1\uffff\2\162\1\uffff\2\162"+
			"\1\uffff\2\162\1\uffff\1\162\1\uffff\1\162\1\uffff\1\162\2\uffff\2\162"+
			"\4\uffff\2\162\1\uffff\2\162\1\uffff\3\162\1\uffff\4\162\4\uffff\1\162"+
			"\1\uffff\4\162\1\uffff\1\162\1\uffff\3\162\3\uffff\12\162\1\uffff\1\162"+
			"\2\uffff\2\162\4\uffff\4\162\1\156\4\162\1\uffff\3\162\1\157\1\162\1"+
			"\uffff\4\162\1\uffff\7\162\2\uffff\1\162\1\uffff\3\162\4\uffff\1\162"+
			"\1\uffff\3\162\4\uffff\1\162\1\uffff\1\162\1\uffff\1\162\2\uffff\4\162"+
			"\1\152\2\162\2\uffff\3\162\1\uffff\1\162\1\uffff\5\162\2\uffff\1\162"+
			"\2\uffff\5\162\76\uffff\1\162\44\uffff\1\162\56\uffff\1\162\3\uffff\1"+
			"\162\56\uffff\1\162\3\uffff\1\162\31\uffff\1\162\6\uffff\1\162\73\uffff"+
			"\1\162",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"\1\36\20\uffff\1\42\55\uffff\1\166\161\uffff\1\36",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"\1\60\55\uffff\1\172",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"\1\175\u00c9\uffff\1\176\144\uffff\1\177\60\uffff\1\174",
			"",
			"\1\u0081\u00c9\uffff\1\u0082\144\uffff\1\u0083\60\uffff\1\u0080",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"\1\u0085\u0081\uffff\1\u0087\107\uffff\1\u0086\u0095\uffff\1\u0084",
			"",
			"\1\u0089\u0081\uffff\1\u008b\107\uffff\1\u008a\u0095\uffff\1\u0088",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"\1\uffff",
			"",
			"\1\uffff",
			"",
			"\1\uffff",
			"",
			"\1\uffff",
			"",
			"\1\uffff",
			"",
			"\1\uffff",
			"",
			"\1\uffff",
			"",
			"\1\uffff"
	};

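	/*
	 * Unlike DFA2, this DFA contains predicate-dependent states; the trailing
	 * "\1\uffff" rows above appear to be their placeholder rows, since such
	 * states are resolved at runtime by specialStateTransition() below rather
	 * than by table lookup.
	 */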
	static final short[] DFA28_eot = DFA.unpackEncodedString(DFA28_eotS);
	static final short[] DFA28_eof = DFA.unpackEncodedString(DFA28_eofS);
	static final char[] DFA28_min = DFA.unpackEncodedStringToUnsignedChars(DFA28_minS);
	static final char[] DFA28_max = DFA.unpackEncodedStringToUnsignedChars(DFA28_maxS);
	static final short[] DFA28_accept = DFA.unpackEncodedString(DFA28_acceptS);
	static final short[] DFA28_special = DFA.unpackEncodedString(DFA28_specialS);
	static final short[][] DFA28_transition;

	static {
		int numStates = DFA28_transitionS.length;
		DFA28_transition = new short[numStates][];
		for (int i=0; i<numStates; i++) {
			DFA28_transition[i] = DFA.unpackEncodedString(DFA28_transitionS[i]);
		}
	}

	class DFA28 extends DFA {

		public DFA28(BaseRecognizer recognizer) {
			this.recognizer = recognizer;
			this.decisionNumber = 28;
			this.eot = DFA28_eot;
			this.eof = DFA28_eof;
			this.min = DFA28_min;
			this.max = DFA28_max;
			this.accept = DFA28_accept;
			this.special = DFA28_special;
			this.transition = DFA28_transition;
		}
		@Override
		public String getDescription() {
			return "ddlStatement : ( ... | ( grantPrivileges )=> grantPrivileges | ( revokePrivileges )=> revokePrivileges | showGrants | showRoleGrants | showRolePrincipals | showRoles | grantRole | revokeRole | setRole | showCurrentRole | abortTransactionStatement | killQueryStatement | resourcePlanDdlStatements );";
		}
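		/*
		 * States whose successor depends on a syntactic predicate cannot be
		 * encoded in the static tables, so the DFA routes them here. Each case
		 * peeks at LA(1), snapshots the stream position, rewinds so the
		 * synpred fragments re-scan from the decision point, chooses the
		 * target state (evaluating synpred1/synpred2 where needed), and seeks
		 * back before returning.
		 */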
		@Override
		public int specialStateTransition(int s, IntStream _input) throws NoViableAltException {
			TokenStream input = (TokenStream)_input;
			int _s = s;
			switch ( s ) {
					case 0 : 
						int LA28_14 = input.LA(1);
						 
						int index28_14 = input.index();
						input.rewind();
						s = -1;
						if ( (LA28_14==KW_ALL) && (synpred1_HiveParser())) {s = 90;}
						else if ( (LA28_14==KW_ALTER) && (synpred1_HiveParser())) {s = 91;}
						else if ( (LA28_14==KW_UPDATE) && (synpred1_HiveParser())) {s = 92;}
						else if ( (LA28_14==KW_CREATE) && (synpred1_HiveParser())) {s = 93;}
						else if ( (LA28_14==KW_DROP) && (synpred1_HiveParser())) {s = 94;}
						else if ( (LA28_14==KW_LOCK) ) {s = 95;}
						else if ( (LA28_14==KW_SELECT) && (synpred1_HiveParser())) {s = 96;}
						else if ( (LA28_14==KW_SHOW_DATABASE) ) {s = 97;}
						else if ( (LA28_14==KW_INSERT) && (synpred1_HiveParser())) {s = 98;}
						else if ( (LA28_14==KW_DELETE) && (synpred1_HiveParser())) {s = 99;}
						else if ( (LA28_14==Identifier||(LA28_14 >= KW_ABORT && LA28_14 <= KW_AFTER)||LA28_14==KW_ALLOC_FRACTION||LA28_14==KW_ANALYZE||LA28_14==KW_ARCHIVE||LA28_14==KW_ASC||(LA28_14 >= KW_AUTOCOMMIT && LA28_14 <= KW_BEFORE)||(LA28_14 >= KW_BUCKET && LA28_14 <= KW_BUCKETS)||(LA28_14 >= KW_CACHE && LA28_14 <= KW_CASCADE)||LA28_14==KW_CHANGE||(LA28_14 >= KW_CHECK && LA28_14 <= KW_COLLECTION)||(LA28_14 >= KW_COLUMNS && LA28_14 <= KW_COMMENT)||(LA28_14 >= KW_COMPACT && LA28_14 <= KW_CONCATENATE)||LA28_14==KW_CONTINUE||LA28_14==KW_DATA||LA28_14==KW_DATABASES||(LA28_14 >= KW_DATETIME && LA28_14 <= KW_DBPROPERTIES)||(LA28_14 >= KW_DEFAULT && LA28_14 <= KW_DEFINED)||(LA28_14 >= KW_DELIMITED && LA28_14 <= KW_DESC)||(LA28_14 >= KW_DETAIL && LA28_14 <= KW_DISABLE)||(LA28_14 >= KW_DISTRIBUTE && LA28_14 <= KW_DO)||LA28_14==KW_DOW||(LA28_14 >= KW_DUMP && LA28_14 <= KW_ELEM_TYPE)||LA28_14==KW_ENABLE||(LA28_14 >= KW_ENFORCED && LA28_14 <= KW_ESCAPED)||LA28_14==KW_EXCLUSIVE||(LA28_14 >= KW_EXPLAIN && LA28_14 <= KW_EXPRESSION)||(LA28_14 >= KW_FIELDS && LA28_14 <= KW_FIRST)||(LA28_14 >= KW_FORMAT && LA28_14 <= KW_FORMATTED)||LA28_14==KW_FUNCTIONS||(LA28_14 >= KW_HOUR && LA28_14 <= KW_IDXPROPERTIES)||(LA28_14 >= KW_INDEX && LA28_14 <= KW_INDEXES)||(LA28_14 >= KW_INPATH && LA28_14 <= KW_INPUTFORMAT)||(LA28_14 >= KW_ISOLATION && LA28_14 <= KW_JAR)||(LA28_14 >= KW_KEY && LA28_14 <= KW_LAST)||LA28_14==KW_LEVEL||(LA28_14 >= KW_LIMIT && LA28_14 <= KW_LOAD)||LA28_14==KW_LOCATION||(LA28_14 >= KW_LOCKS && LA28_14 <= KW_LONG)||LA28_14==KW_MANAGEMENT||(LA28_14 >= KW_MAPJOIN && LA28_14 <= KW_MATERIALIZED)||LA28_14==KW_METADATA||(LA28_14 >= KW_MINUTE && LA28_14 <= KW_MONTH)||(LA28_14 >= KW_MOVE && LA28_14 <= KW_MSCK)||(LA28_14 >= KW_NORELY && LA28_14 <= KW_NOSCAN)||LA28_14==KW_NOVALIDATE||LA28_14==KW_NULLS||LA28_14==KW_OFFSET||(LA28_14 >= KW_OPERATOR && LA28_14 <= KW_OPTION)||(LA28_14 >= KW_OUTPUTDRIVER && LA28_14 <= KW_OUTPUTFORMAT)||(LA28_14 >= KW_OVERWRITE && LA28_14 <= KW_OWNER)||(LA28_14 >= KW_PARTITIONED && LA28_14 <= KW_PATH)||(LA28_14 >= KW_PLAN && LA28_14 <= KW_POOL)||LA28_14==KW_PRINCIPALS||(LA28_14 >= KW_PURGE && LA28_14 <= KW_QUERY_PARALLELISM)||LA28_14==KW_READ||(LA28_14 >= KW_REBUILD && LA28_14 <= KW_RECORDWRITER)||(LA28_14 >= KW_RELOAD && LA28_14 <= KW_RESTRICT)||LA28_14==KW_REWRITE||(LA28_14 >= KW_ROLE && LA28_14 <= KW_ROLES)||(LA28_14 >= KW_SCHEDULING_POLICY && LA28_14 <= KW_SECOND)||(LA28_14 >= KW_SEMI && LA28_14 <= KW_SERVER)||(LA28_14 >= KW_SETS && LA28_14 <= KW_SHOW)||LA28_14==KW_SKEWED||(LA28_14 >= KW_SNAPSHOT && LA28_14 <= KW_SSL)||(LA28_14 >= KW_STATISTICS && LA28_14 <= KW_SUMMARY)||LA28_14==KW_TABLES||(LA28_14 >= KW_TBLPROPERTIES && LA28_14 <= KW_TERMINATED)||LA28_14==KW_TINYINT||(LA28_14 >= KW_TOUCH && LA28_14 <= KW_TRANSACTIONS)||LA28_14==KW_UNARCHIVE||LA28_14==KW_UNDO||LA28_14==KW_UNIONTYPE||(LA28_14 >= KW_UNLOCK && LA28_14 <= KW_UNSIGNED)||(LA28_14 >= KW_URI && LA28_14 <= KW_USE)||(LA28_14 >= KW_UTC && LA28_14 <= KW_VALIDATE)||LA28_14==KW_VALUE_TYPE||(LA28_14 >= KW_VECTORIZATION && LA28_14 <= KW_WEEK)||LA28_14==KW_WHILE||(LA28_14 >= KW_WORK && LA28_14 <= KW_ZONE)||LA28_14==KW_BATCH||LA28_14==KW_DAYOFWEEK||LA28_14==KW_HOLD_DDLTIME||LA28_14==KW_IGNORE||LA28_14==KW_NO_DROP||LA28_14==KW_OFFLINE||LA28_14==KW_PROTECTION||LA28_14==KW_READONLY||LA28_14==KW_TIMESTAMPTZ) ) {s = 100;}
						 
						input.seek(index28_14);
						if ( s>=0 ) return s;
						break;

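					/*
					 * The long membership test above (and its twin in case 1)
					 * evidently enumerates Hive's non-reserved keywords, which
					 * may also appear where an identifier is expected; any of
					 * them sends the DFA to the unpredicated fall-through
					 * state.
					 */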
					case 1 : 
						int LA28_15 = input.LA(1);
						 
						int index28_15 = input.index();
						input.rewind();
						s = -1;
						if ( (LA28_15==KW_GRANT) && (synpred2_HiveParser())) {s = 103;}
						else if ( (LA28_15==KW_ALL) && (synpred2_HiveParser())) {s = 104;}
						else if ( (LA28_15==KW_ALTER) && (synpred2_HiveParser())) {s = 105;}
						else if ( (LA28_15==KW_UPDATE) && (synpred2_HiveParser())) {s = 106;}
						else if ( (LA28_15==KW_CREATE) && (synpred2_HiveParser())) {s = 107;}
						else if ( (LA28_15==KW_DROP) && (synpred2_HiveParser())) {s = 108;}
						else if ( (LA28_15==KW_LOCK) ) {s = 109;}
						else if ( (LA28_15==KW_SELECT) && (synpred2_HiveParser())) {s = 110;}
						else if ( (LA28_15==KW_SHOW_DATABASE) ) {s = 111;}
						else if ( (LA28_15==KW_INSERT) && (synpred2_HiveParser())) {s = 112;}
						else if ( (LA28_15==KW_DELETE) && (synpred2_HiveParser())) {s = 113;}
						else if ( (LA28_15==Identifier||(LA28_15 >= KW_ABORT && LA28_15 <= KW_AFTER)||LA28_15==KW_ALLOC_FRACTION||LA28_15==KW_ANALYZE||LA28_15==KW_ARCHIVE||LA28_15==KW_ASC||(LA28_15 >= KW_AUTOCOMMIT && LA28_15 <= KW_BEFORE)||(LA28_15 >= KW_BUCKET && LA28_15 <= KW_BUCKETS)||(LA28_15 >= KW_CACHE && LA28_15 <= KW_CASCADE)||LA28_15==KW_CHANGE||(LA28_15 >= KW_CHECK && LA28_15 <= KW_COLLECTION)||(LA28_15 >= KW_COLUMNS && LA28_15 <= KW_COMMENT)||(LA28_15 >= KW_COMPACT && LA28_15 <= KW_CONCATENATE)||LA28_15==KW_CONTINUE||LA28_15==KW_DATA||LA28_15==KW_DATABASES||(LA28_15 >= KW_DATETIME && LA28_15 <= KW_DBPROPERTIES)||(LA28_15 >= KW_DEFAULT && LA28_15 <= KW_DEFINED)||(LA28_15 >= KW_DELIMITED && LA28_15 <= KW_DESC)||(LA28_15 >= KW_DETAIL && LA28_15 <= KW_DISABLE)||(LA28_15 >= KW_DISTRIBUTE && LA28_15 <= KW_DO)||LA28_15==KW_DOW||(LA28_15 >= KW_DUMP && LA28_15 <= KW_ELEM_TYPE)||LA28_15==KW_ENABLE||(LA28_15 >= KW_ENFORCED && LA28_15 <= KW_ESCAPED)||LA28_15==KW_EXCLUSIVE||(LA28_15 >= KW_EXPLAIN && LA28_15 <= KW_EXPRESSION)||(LA28_15 >= KW_FIELDS && LA28_15 <= KW_FIRST)||(LA28_15 >= KW_FORMAT && LA28_15 <= KW_FORMATTED)||LA28_15==KW_FUNCTIONS||(LA28_15 >= KW_HOUR && LA28_15 <= KW_IDXPROPERTIES)||(LA28_15 >= KW_INDEX && LA28_15 <= KW_INDEXES)||(LA28_15 >= KW_INPATH && LA28_15 <= KW_INPUTFORMAT)||(LA28_15 >= KW_ISOLATION && LA28_15 <= KW_JAR)||(LA28_15 >= KW_KEY && LA28_15 <= KW_LAST)||LA28_15==KW_LEVEL||(LA28_15 >= KW_LIMIT && LA28_15 <= KW_LOAD)||LA28_15==KW_LOCATION||(LA28_15 >= KW_LOCKS && LA28_15 <= KW_LONG)||LA28_15==KW_MANAGEMENT||(LA28_15 >= KW_MAPJOIN && LA28_15 <= KW_MATERIALIZED)||LA28_15==KW_METADATA||(LA28_15 >= KW_MINUTE && LA28_15 <= KW_MONTH)||(LA28_15 >= KW_MOVE && LA28_15 <= KW_MSCK)||(LA28_15 >= KW_NORELY && LA28_15 <= KW_NOSCAN)||LA28_15==KW_NOVALIDATE||LA28_15==KW_NULLS||LA28_15==KW_OFFSET||(LA28_15 >= KW_OPERATOR && LA28_15 <= KW_OPTION)||(LA28_15 >= KW_OUTPUTDRIVER && LA28_15 <= KW_OUTPUTFORMAT)||(LA28_15 >= KW_OVERWRITE && LA28_15 <= KW_OWNER)||(LA28_15 >= KW_PARTITIONED && LA28_15 <= KW_PATH)||(LA28_15 >= KW_PLAN && LA28_15 <= KW_POOL)||LA28_15==KW_PRINCIPALS||(LA28_15 >= KW_PURGE && LA28_15 <= KW_QUERY_PARALLELISM)||LA28_15==KW_READ||(LA28_15 >= KW_REBUILD && LA28_15 <= KW_RECORDWRITER)||(LA28_15 >= KW_RELOAD && LA28_15 <= KW_RESTRICT)||LA28_15==KW_REWRITE||(LA28_15 >= KW_ROLE && LA28_15 <= KW_ROLES)||(LA28_15 >= KW_SCHEDULING_POLICY && LA28_15 <= KW_SECOND)||(LA28_15 >= KW_SEMI && LA28_15 <= KW_SERVER)||(LA28_15 >= KW_SETS && LA28_15 <= KW_SHOW)||LA28_15==KW_SKEWED||(LA28_15 >= KW_SNAPSHOT && LA28_15 <= KW_SSL)||(LA28_15 >= KW_STATISTICS && LA28_15 <= KW_SUMMARY)||LA28_15==KW_TABLES||(LA28_15 >= KW_TBLPROPERTIES && LA28_15 <= KW_TERMINATED)||LA28_15==KW_TINYINT||(LA28_15 >= KW_TOUCH && LA28_15 <= KW_TRANSACTIONS)||LA28_15==KW_UNARCHIVE||LA28_15==KW_UNDO||LA28_15==KW_UNIONTYPE||(LA28_15 >= KW_UNLOCK && LA28_15 <= KW_UNSIGNED)||(LA28_15 >= KW_URI && LA28_15 <= KW_USE)||(LA28_15 >= KW_UTC && LA28_15 <= KW_VALIDATE)||LA28_15==KW_VALUE_TYPE||(LA28_15 >= KW_VECTORIZATION && LA28_15 <= KW_WEEK)||LA28_15==KW_WHILE||(LA28_15 >= KW_WORK && LA28_15 <= KW_ZONE)||LA28_15==KW_BATCH||LA28_15==KW_DAYOFWEEK||LA28_15==KW_HOLD_DDLTIME||LA28_15==KW_IGNORE||LA28_15==KW_NO_DROP||LA28_15==KW_OFFLINE||LA28_15==KW_PROTECTION||LA28_15==KW_READONLY||LA28_15==KW_TIMESTAMPTZ) ) {s = 114;}
						 
						input.seek(index28_15);
						if ( s>=0 ) return s;
						break;

					case 2 : 
						int LA28_95 = input.LA(1);
						 
						int index28_95 = input.index();
						input.rewind();
						s = -1;
						if ( (LA28_95==LPAREN) && (synpred1_HiveParser())) {s = 124;}
						else if ( (LA28_95==COMMA) ) {s = 125;}
						else if ( (LA28_95==KW_ON) && (synpred1_HiveParser())) {s = 126;}
						else if ( (LA28_95==KW_TO) ) {s = 127;}
						 
						input.seek(index28_95);
						if ( s>=0 ) return s;
						break;

					case 3 : 
						int LA28_97 = input.LA(1);
						 
						int index28_97 = input.index();
						input.rewind();
						s = -1;
						if ( (LA28_97==LPAREN) && (synpred1_HiveParser())) {s = 128;}
						else if ( (LA28_97==COMMA) ) {s = 129;}
						else if ( (LA28_97==KW_ON) && (synpred1_HiveParser())) {s = 130;}
						else if ( (LA28_97==KW_TO) ) {s = 131;}
						 
						input.seek(index28_97);
						if ( s>=0 ) return s;
						break;

					case 4 : 
						int LA28_109 = input.LA(1);
						 
						int index28_109 = input.index();
						input.rewind();
						s = -1;
						if ( (LA28_109==LPAREN) && (synpred2_HiveParser())) {s = 132;}
						else if ( (LA28_109==COMMA) ) {s = 133;}
						else if ( (LA28_109==KW_ON) && (synpred2_HiveParser())) {s = 134;}
						else if ( (LA28_109==KW_FROM) ) {s = 135;}
						 
						input.seek(index28_109);
						if ( s>=0 ) return s;
						break;

					case 5 : 
						int LA28_111 = input.LA(1);
						 
						int index28_111 = input.index();
						input.rewind();
						s = -1;
						if ( (LA28_111==LPAREN) && (synpred2_HiveParser())) {s = 136;}
						else if ( (LA28_111==COMMA) ) {s = 137;}
						else if ( (LA28_111==KW_ON) && (synpred2_HiveParser())) {s = 138;}
						else if ( (LA28_111==KW_FROM) ) {s = 139;}
						 
						input.seek(index28_111);
						if ( s>=0 ) return s;
						break;

					case 6 : 
						int LA28_125 = input.LA(1);
						 
						int index28_125 = input.index();
						input.rewind();
						s = -1;
						if ( (synpred1_HiveParser()) ) {s = 130;}
						else if ( (true) ) {s = 100;}
						 
						input.seek(index28_125);
						if ( s>=0 ) return s;
						break;

					case 7 : 
						int LA28_127 = input.LA(1);
						 
						int index28_127 = input.index();
						input.rewind();
						s = -1;
						if ( (synpred1_HiveParser()) ) {s = 130;}
						else if ( (true) ) {s = 100;}
						 
						input.seek(index28_127);
						if ( s>=0 ) return s;
						break;

					case 8 : 
						int LA28_129 = input.LA(1);
						 
						int index28_129 = input.index();
						input.rewind();
						s = -1;
						if ( (synpred1_HiveParser()) ) {s = 130;}
						else if ( (true) ) {s = 100;}
						 
						input.seek(index28_129);
						if ( s>=0 ) return s;
						break;

					case 9 : 
						int LA28_131 = input.LA(1);
						 
						int index28_131 = input.index();
						input.rewind();
						s = -1;
						if ( (synpred1_HiveParser()) ) {s = 130;}
						else if ( (true) ) {s = 100;}
						 
						input.seek(index28_131);
						if ( s>=0 ) return s;
						break;

					case 10 : 
						int LA28_133 = input.LA(1);
						 
						int index28_133 = input.index();
						input.rewind();
						s = -1;
						if ( (synpred2_HiveParser()) ) {s = 138;}
						else if ( (true) ) {s = 114;}
						 
						input.seek(index28_133);
						if ( s>=0 ) return s;
						break;

					case 11 : 
						int LA28_135 = input.LA(1);
						 
						int index28_135 = input.index();
						input.rewind();
						s = -1;
						if ( (synpred2_HiveParser()) ) {s = 138;}
						else if ( (true) ) {s = 114;}
						 
						input.seek(index28_135);
						if ( s>=0 ) return s;
						break;

					case 12 : 
						int LA28_137 = input.LA(1);
						 
						int index28_137 = input.index();
						input.rewind();
						s = -1;
						if ( (synpred2_HiveParser()) ) {s = 138;}
						else if ( (true) ) {s = 114;}
						 
						input.seek(index28_137);
						if ( s>=0 ) return s;
						break;

					case 13 : 
						int LA28_139 = input.LA(1);
						 
						int index28_139 = input.index();
						input.rewind();
						s = -1;
						if ( (synpred2_HiveParser()) ) {s = 138;}
						else if ( (true) ) {s = 114;}
						 
						input.seek(index28_139);
						if ( s>=0 ) return s;
						break;
			}
			if (state.backtracking>0) {state.failed=true; return -1;}
			NoViableAltException nvae =
				new NoViableAltException(getDescription(), 28, _s, input);
			error(nvae);
			throw nvae;
		}
	}

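	/*
	 * DFA226 uses the same packed layout. Note DFA226_eofS = "\1\2\133\uffff":
	 * state 0 moves to state 2 on end-of-file, and state 2 accepts
	 * alternative 2, so EOF by itself resolves this decision.
	 */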
	static final String DFA226_eotS =
		"\134\uffff";
	static final String DFA226_eofS =
		"\1\2\133\uffff";
	static final String DFA226_minS =
		"\1\12\1\15\41\uffff\1\4\70\uffff";
	static final String DFA226_maxS =
		"\1\u0177\1\u028c\41\uffff\1\u017b\70\uffff";
	static final String DFA226_acceptS =
		"\2\uffff\1\2\73\uffff\1\1\35\uffff";
	static final String DFA226_specialS =
		"\134\uffff}>";
	static final String[] DFA226_transitionS = {
			"\1\2\37\uffff\1\2\23\uffff\1\2\52\uffff\1\2\13\uffff\1\2\26\uffff\1\2"+
			"\4\uffff\1\2\1\uffff\1\2\2\uffff\1\2\10\uffff\1\2\1\uffff\1\2\14\uffff"+
			"\1\2\4\uffff\2\2\2\uffff\1\2\6\uffff\1\1\6\uffff\1\2\12\uffff\1\2\10"+
			"\uffff\1\2\36\uffff\3\2\32\uffff\1\2\14\uffff\1\2\5\uffff\1\2\10\uffff"+
			"\1\2\22\uffff\1\2\13\uffff\1\2\3\uffff\1\2\10\uffff\1\2\1\uffff\1\2\12"+
			"\uffff\1\2\14\uffff\1\2",
			"\1\2\13\uffff\10\2\1\uffff\1\2\1\uffff\1\2\3\uffff\2\2\1\uffff\1\2\1"+
			"\uffff\2\2\1\uffff\3\2\1\uffff\2\2\1\uffff\5\2\1\uffff\5\2\1\uffff\2"+
			"\2\1\uffff\4\2\2\uffff\1\2\4\uffff\2\2\1\uffff\1\2\1\uffff\5\2\1\uffff"+
			"\3\2\1\uffff\3\2\1\uffff\4\2\1\uffff\4\2\1\uffff\2\2\1\uffff\1\2\1\uffff"+
			"\2\2\2\uffff\5\2\2\uffff\2\2\1\uffff\6\2\3\uffff\2\2\3\uffff\1\2\2\uffff"+
			"\1\2\1\uffff\3\2\2\uffff\2\2\1\uffff\3\2\1\uffff\1\2\1\uffff\1\2\2\uffff"+
			"\3\2\1\uffff\1\2\1\43\3\2\3\uffff\1\2\1\uffff\3\2\1\uffff\5\2\1\uffff"+
			"\6\2\1\uffff\1\2\1\uffff\2\2\1\uffff\2\2\1\uffff\6\2\1\uffff\1\2\2\uffff"+
			"\2\2\4\uffff\2\2\1\uffff\2\2\1\uffff\3\2\1\uffff\4\2\4\uffff\1\2\1\uffff"+
			"\4\2\1\uffff\1\2\1\uffff\3\2\3\uffff\12\2\1\uffff\1\2\2\uffff\2\2\4\uffff"+
			"\4\2\1\uffff\4\2\1\uffff\12\2\1\uffff\7\2\2\uffff\1\2\1\uffff\3\2\2\uffff"+
			"\3\2\1\uffff\3\2\2\uffff\1\2\1\uffff\1\2\1\uffff\1\2\1\uffff\1\2\2\uffff"+
			"\4\2\1\uffff\2\2\2\uffff\3\2\1\uffff\1\2\1\uffff\5\2\2\uffff\1\2\2\uffff"+
			"\5\2\4\uffff\1\2\2\uffff\1\2\2\uffff\3\2\10\uffff\3\2\46\uffff\1\2\44"+
			"\uffff\1\2\56\uffff\1\2\3\uffff\1\2\56\uffff\1\2\3\uffff\1\2\31\uffff"+
			"\1\2\6\uffff\1\2\73\uffff\1\2",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"\3\2\3\uffff\2\2\2\uffff\2\2\1\uffff\1\2\1\uffff\2\2\1\uffff\2\2\15"+
			"\uffff\1\2\11\uffff\1\2\150\uffff\1\2\13\uffff\1\2\15\uffff\1\2\33\uffff"+
			"\1\2\11\uffff\1\2\40\uffff\1\2\2\uffff\1\2\15\uffff\1\2\4\uffff\1\2\43"+
			"\uffff\1\76\34\uffff\1\2\26\uffff\2\2\1\uffff\2\2\1\uffff\3\2\2\uffff"+
			"\1\2\10\uffff\1\2",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			"",
			""
	};

	static final short[] DFA226_eot = DFA.unpackEncodedString(DFA226_eotS);
	static final short[] DFA226_eof = DFA.unpackEncodedString(DFA226_eofS);
	static final char[] DFA226_min = DFA.unpackEncodedStringToUnsignedChars(DFA226_minS);
	static final char[] DFA226_max = DFA.unpackEncodedStringToUnsignedChars(DFA226_maxS);
	static final short[] DFA226_accept = DFA.unpackEncodedString(DFA226_acceptS);
	static final short[] DFA226_special = DFA.unpackEncodedString(DFA226_specialS);
	static final short[][] DFA226_transition;

	static {
		int numStates = DFA226_transitionS.length;
		DFA226_transition = new short[numStates][];
		for (int i=0; i<numStates; i++) {
			DFA226_transition[i] = DFA.unpackEncodedString(DFA226_transitionS[i]);
		}
	}