// $ANTLR 3.4 org/apache/hadoop/hive/ql/parse/HiveParser.g 2015-05-14 15:45:15

package org.apache.hadoop.hive.ql.parse;

import java.util.Collection;
import java.util.HashMap;


import org.antlr.runtime.*;
import java.util.Stack;
import java.util.List;
import java.util.ArrayList;

import org.antlr.runtime.tree.*;


/**
   Licensed to the Apache Software Foundation (ASF) under one or more 
   contributor license agreements.  See the NOTICE file distributed with 
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with 
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
@SuppressWarnings({"all", "warnings", "unchecked"})
public class HiveParser extends Parser {
    public static final String[] tokenNames = new String[] {
        "", "", "", "", "AMPERSAND", "BITWISEOR", "BITWISEXOR", "BigintLiteral", "ByteLengthLiteral", "COLON", "COMMA", "COMMENT", "CharSetLiteral", "CharSetName", "DIV", "DIVIDE", "DOLLAR", "DOT", "DecimalLiteral", "Digit", "EQUAL", "EQUAL_NS", "Exponent", "GREATERTHAN", "GREATERTHANOREQUALTO", "HexDigit", "Identifier", "KW_ADD", "KW_ADMIN", "KW_AFTER", "KW_ALL", "KW_ALTER", "KW_ANALYZE", "KW_AND", "KW_ARCHIVE", "KW_ARRAY", "KW_AS", "KW_ASC", "KW_AUTHORIZATION", "KW_BEFORE", "KW_BETWEEN", "KW_BIGINT", "KW_BINARY", "KW_BOOLEAN", "KW_BOTH", "KW_BUCKET", "KW_BUCKETS", "KW_BY", "KW_CASCADE", "KW_CASE", "KW_CAST", "KW_CHANGE", "KW_CHAR", "KW_CLUSTER", "KW_CLUSTERED", "KW_CLUSTERSTATUS", "KW_COLLECTION", "KW_COLUMN", "KW_COLUMNS", "KW_COMMENT", "KW_COMPACT", "KW_COMPACTIONS", "KW_COMPUTE", "KW_CONCATENATE", "KW_CONF", "KW_CONTINUE", "KW_CREATE", "KW_CROSS", "KW_CUBE", "KW_CURRENT", "KW_CURSOR", "KW_DATA", "KW_DATABASE", "KW_DATABASES", "KW_DATE", "KW_DATETIME", "KW_DBPROPERTIES", "KW_DECIMAL", "KW_DEFAULT", "KW_DEFERRED", "KW_DEFINED", "KW_DELETE", "KW_DELIMITED", "KW_DEPENDENCY", "KW_DESC", "KW_DESCRIBE", "KW_DIRECTORIES", "KW_DIRECTORY", "KW_DISABLE", "KW_DISTINCT", "KW_DISTRIBUTE", "KW_DOUBLE", "KW_DROP", "KW_ELEM_TYPE", "KW_ELSE", "KW_ENABLE", "KW_END", "KW_ESCAPED", "KW_EXCHANGE", "KW_EXCLUSIVE", "KW_EXISTS", "KW_EXPLAIN", "KW_EXPORT", "KW_EXTENDED", "KW_EXTERNAL", "KW_FALSE", "KW_FETCH", "KW_FIELDS", "KW_FILE", "KW_FILEFORMAT", "KW_FIRST", "KW_FLOAT", "KW_FOLLOWING", "KW_FOR", "KW_FORMAT", "KW_FORMATTED", "KW_FROM", "KW_FULL", "KW_FUNCTION", "KW_FUNCTIONS", "KW_GRANT", "KW_GROUP", "KW_GROUPING", "KW_HAVING", "KW_HOLD_DDLTIME", "KW_IDXPROPERTIES", "KW_IF", "KW_IGNORE", "KW_IMPORT", "KW_IN", "KW_INDEX", "KW_INDEXES", "KW_INNER", "KW_INPATH", "KW_INPUTDRIVER", "KW_INPUTFORMAT", "KW_INSERT", "KW_INT", "KW_INTERSECT", "KW_INTO", "KW_IS", "KW_ITEMS", "KW_JAR", "KW_JOIN", "KW_KEYS", "KW_KEY_TYPE", "KW_LATERAL", "KW_LEFT", "KW_LESS", "KW_LIKE", "KW_LIMIT", "KW_LINES", "KW_LOAD", "KW_LOCAL", "KW_LOCATION", "KW_LOCK", "KW_LOCKS", "KW_LOGICAL", "KW_LONG", "KW_MACRO", "KW_MAP", "KW_MAPJOIN", "KW_MATERIALIZED", "KW_MINUS", "KW_MORE", "KW_MSCK", "KW_NONE", "KW_NOSCAN", "KW_NOT", "KW_NO_DROP", "KW_NULL", "KW_OF", "KW_OFFLINE", "KW_ON", "KW_OPTION", "KW_OR", "KW_ORDER", "KW_OUT", "KW_OUTER", "KW_OUTPUTDRIVER", "KW_OUTPUTFORMAT", "KW_OVER", "KW_OVERWRITE", "KW_OWNER", "KW_PARTIALSCAN", "KW_PARTITION", "KW_PARTITIONED", "KW_PARTITIONS", "KW_PERCENT", "KW_PLUS", "KW_PRECEDING", "KW_PRESERVE", "KW_PRETTY", "KW_PRINCIPALS", "KW_PROCEDURE", "KW_PROTECTION", "KW_PURGE", "KW_RANGE", "KW_READ", "KW_READONLY", "KW_READS", "KW_REBUILD", "KW_RECORDREADER", "KW_RECORDWRITER", "KW_REDUCE", "KW_REGEXP", "KW_RENAME", "KW_REPAIR", "KW_REPLACE", "KW_RESTRICT", "KW_REVOKE", "KW_REWRITE", "KW_RIGHT", "KW_RLIKE", "KW_ROLE", "KW_ROLES", "KW_ROLLUP", "KW_ROW", "KW_ROWS", "KW_SCHEMA", "KW_SCHEMAS", "KW_SELECT", "KW_SEMI", "KW_SERDE", "KW_SERDEPROPERTIES", "KW_SET", "KW_SETS", "KW_SHARED", "KW_SHOW", "KW_SHOW_DATABASE", "KW_SKEWED", "KW_SMALLINT", "KW_SORT", "KW_SORTED", "KW_SSL", "KW_STATISTICS", "KW_STORED", "KW_STREAMTABLE", "KW_STRING", "KW_STRUCT", "KW_TABLE", "KW_TABLES", "KW_TABLESAMPLE", "KW_TBLPROPERTIES", "KW_TEMPORARY", "KW_TERMINATED", "KW_THEN", "KW_TIMESTAMP", "KW_TINYINT", "KW_TO", "KW_TOUCH", "KW_TRANSACTIONS", "KW_TRANSFORM", "KW_TRIGGER", "KW_TRUE", "KW_TRUNCATE", "KW_UNARCHIVE", "KW_UNBOUNDED", "KW_UNDO", "KW_UNION", "KW_UNIONTYPE", "KW_UNIQUEJOIN", "KW_UNLOCK", "KW_UNSET", "KW_UNSIGNED", "KW_UPDATE", 
"KW_USE", "KW_USER", "KW_USING", "KW_UTC", "KW_UTCTIMESTAMP", "KW_VALUES", "KW_VALUE_TYPE", "KW_VARCHAR", "KW_VIEW", "KW_WHEN", "KW_WHERE", "KW_WHILE", "KW_WINDOW", "KW_WITH", "LCURLY", "LESSTHAN", "LESSTHANOREQUALTO", "LPAREN", "LSQUARE", "Letter", "MINUS", "MOD", "NOTEQUAL", "Number", "PLUS", "QUESTION", "QuotedIdentifier", "RCURLY", "RPAREN", "RSQUARE", "RegexComponent", "SEMICOLON", "STAR", "SmallintLiteral", "StringLiteral", "TILDE", "TinyintLiteral", "WS", "TOK_ADMIN_OPTION_FOR", "TOK_ALIASLIST", "TOK_ALLCOLREF", "TOK_ALTERDATABASE_OWNER", "TOK_ALTERDATABASE_PROPERTIES", "TOK_ALTERINDEX_PROPERTIES", "TOK_ALTERINDEX_REBUILD", "TOK_ALTERTABLE", "TOK_ALTERTABLE_ADDCOLS", "TOK_ALTERTABLE_ADDPARTS", "TOK_ALTERTABLE_ARCHIVE", "TOK_ALTERTABLE_BUCKETS", "TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION", "TOK_ALTERTABLE_CLUSTER_SORT", "TOK_ALTERTABLE_COMPACT", "TOK_ALTERTABLE_DROPPARTS", "TOK_ALTERTABLE_DROPPROPERTIES", "TOK_ALTERTABLE_EXCHANGEPARTITION", "TOK_ALTERTABLE_FILEFORMAT", "TOK_ALTERTABLE_LOCATION", "TOK_ALTERTABLE_MERGEFILES", "TOK_ALTERTABLE_PARTCOLTYPE", "TOK_ALTERTABLE_PROPERTIES", "TOK_ALTERTABLE_PROTECTMODE", "TOK_ALTERTABLE_RENAME", "TOK_ALTERTABLE_RENAMECOL", "TOK_ALTERTABLE_RENAMEPART", "TOK_ALTERTABLE_REPLACECOLS", "TOK_ALTERTABLE_SERDEPROPERTIES", "TOK_ALTERTABLE_SERIALIZER", "TOK_ALTERTABLE_SKEWED", "TOK_ALTERTABLE_SKEWED_LOCATION", "TOK_ALTERTABLE_TOUCH", "TOK_ALTERTABLE_UNARCHIVE", "TOK_ALTERTABLE_UPDATECOLSTATS", "TOK_ALTERVIEW", "TOK_ALTERVIEW_ADDPARTS", "TOK_ALTERVIEW_DROPPARTS", "TOK_ALTERVIEW_DROPPROPERTIES", "TOK_ALTERVIEW_PROPERTIES", "TOK_ALTERVIEW_RENAME", "TOK_ANALYZE", "TOK_ANONYMOUS", "TOK_ARCHIVE", "TOK_BIGINT", "TOK_BINARY", "TOK_BOOLEAN", "TOK_CASCADE", "TOK_CHAR", "TOK_CHARSETLITERAL", "TOK_CLUSTERBY", "TOK_COLTYPELIST", "TOK_COL_NAME", "TOK_CREATEDATABASE", "TOK_CREATEFUNCTION", "TOK_CREATEINDEX", "TOK_CREATEINDEX_INDEXTBLNAME", "TOK_CREATEMACRO", "TOK_CREATEROLE", "TOK_CREATETABLE", "TOK_CREATEVIEW", "TOK_CROSSJOIN", "TOK_CTE", "TOK_CUBE_GROUPBY", "TOK_DATABASECOMMENT", "TOK_DATABASELOCATION", "TOK_DATABASEPROPERTIES", "TOK_DATE", "TOK_DATELITERAL", "TOK_DATETIME", "TOK_DBPROPLIST", "TOK_DB_TYPE", "TOK_DECIMAL", "TOK_DEFERRED_REBUILDINDEX", "TOK_DELETE_FROM", "TOK_DESCDATABASE", "TOK_DESCFUNCTION", "TOK_DESCTABLE", "TOK_DESTINATION", "TOK_DIR", "TOK_DISABLE", "TOK_DISTRIBUTEBY", "TOK_DOUBLE", "TOK_DROPDATABASE", "TOK_DROPFUNCTION", "TOK_DROPINDEX", "TOK_DROPMACRO", "TOK_DROPROLE", "TOK_DROPTABLE", "TOK_DROPVIEW", "TOK_ENABLE", "TOK_EXPLAIN", "TOK_EXPLAIN_SQ_REWRITE", "TOK_EXPLIST", "TOK_EXPORT", "TOK_FALSE", "TOK_FILE", "TOK_FILEFORMAT_GENERIC", "TOK_FLOAT", "TOK_FROM", "TOK_FULLOUTERJOIN", "TOK_FUNCTION", "TOK_FUNCTIONDI", "TOK_FUNCTIONSTAR", "TOK_GRANT", "TOK_GRANT_OPTION_FOR", "TOK_GRANT_ROLE", "TOK_GRANT_WITH_ADMIN_OPTION", "TOK_GRANT_WITH_OPTION", "TOK_GROUP", "TOK_GROUPBY", "TOK_GROUPING_SETS", "TOK_GROUPING_SETS_EXPRESSION", "TOK_HAVING", "TOK_HINT", "TOK_HINTARGLIST", "TOK_HINTLIST", "TOK_HOLD_DDLTIME", "TOK_IFEXISTS", "TOK_IFNOTEXISTS", "TOK_IGNOREPROTECTION", "TOK_IMPORT", "TOK_INDEXCOMMENT", "TOK_INDEXPROPERTIES", "TOK_INDEXPROPLIST", "TOK_INSERT", "TOK_INSERT_INTO", "TOK_INT", "TOK_ISNOTNULL", "TOK_ISNULL", "TOK_JAR", "TOK_JOIN", "TOK_LATERAL_VIEW", "TOK_LATERAL_VIEW_OUTER", "TOK_LEFTOUTERJOIN", "TOK_LEFTSEMIJOIN", "TOK_LENGTH", "TOK_LIKETABLE", "TOK_LIMIT", "TOK_LIST", "TOK_LOAD", "TOK_LOCAL_DIR", "TOK_LOCKDB", "TOK_LOCKTABLE", "TOK_MAP", "TOK_MAPJOIN", "TOK_MSCK", "TOK_NOT_CLUSTERED", "TOK_NOT_SORTED", "TOK_NO_DROP", "TOK_NULL", "TOK_OFFLINE", 
"TOK_OP_ADD", "TOK_OP_AND", "TOK_OP_BITAND", "TOK_OP_BITNOT", "TOK_OP_BITOR", "TOK_OP_BITXOR", "TOK_OP_DIV", "TOK_OP_EQ", "TOK_OP_GE", "TOK_OP_GT", "TOK_OP_LE", "TOK_OP_LIKE", "TOK_OP_LT", "TOK_OP_MOD", "TOK_OP_MUL", "TOK_OP_NE", "TOK_OP_NOT", "TOK_OP_OR", "TOK_OP_SUB", "TOK_ORDERBY", "TOK_ORREPLACE", "TOK_PARTITIONINGSPEC", "TOK_PARTITIONLOCATION", "TOK_PARTSPEC", "TOK_PARTVAL", "TOK_PERCENT", "TOK_PRINCIPAL_NAME", "TOK_PRIVILEGE", "TOK_PRIVILEGE_LIST", "TOK_PRIV_ALL", "TOK_PRIV_ALTER_DATA", "TOK_PRIV_ALTER_METADATA", "TOK_PRIV_CREATE", "TOK_PRIV_DELETE", "TOK_PRIV_DROP", "TOK_PRIV_INDEX", "TOK_PRIV_INSERT", "TOK_PRIV_LOCK", "TOK_PRIV_OBJECT", "TOK_PRIV_OBJECT_COL", "TOK_PRIV_SELECT", "TOK_PRIV_SHOW_DATABASE", "TOK_PTBLFUNCTION", "TOK_QUERY", "TOK_READONLY", "TOK_RECORDREADER", "TOK_RECORDWRITER", "TOK_RESOURCE_ALL", "TOK_RESOURCE_LIST", "TOK_RESOURCE_URI", "TOK_RESTRICT", "TOK_REVOKE", "TOK_REVOKE_ROLE", "TOK_RIGHTOUTERJOIN", "TOK_ROLE", "TOK_ROLLUP_GROUPBY", "TOK_ROWCOUNT", "TOK_SELECT", "TOK_SELECTDI", "TOK_SELEXPR", "TOK_SERDE", "TOK_SERDENAME", "TOK_SERDEPROPS", "TOK_SET_COLUMNS_CLAUSE", "TOK_SHOWCOLUMNS", "TOK_SHOWCONF", "TOK_SHOWDATABASES", "TOK_SHOWDBLOCKS", "TOK_SHOWFUNCTIONS", "TOK_SHOWINDEXES", "TOK_SHOWLOCKS", "TOK_SHOWPARTITIONS", "TOK_SHOWTABLES", "TOK_SHOW_COMPACTIONS", "TOK_SHOW_CREATETABLE", "TOK_SHOW_GRANT", "TOK_SHOW_ROLES", "TOK_SHOW_ROLE_GRANT", "TOK_SHOW_ROLE_PRINCIPALS", "TOK_SHOW_SET_ROLE", "TOK_SHOW_TABLESTATUS", "TOK_SHOW_TBLPROPERTIES", "TOK_SHOW_TRANSACTIONS", "TOK_SKEWED_LOCATIONS", "TOK_SKEWED_LOCATION_LIST", "TOK_SKEWED_LOCATION_MAP", "TOK_SMALLINT", "TOK_SORTBY", "TOK_STORAGEHANDLER", "TOK_STOREDASDIRS", "TOK_STREAMTABLE", "TOK_STRING", "TOK_STRINGLITERALSEQUENCE", "TOK_STRUCT", "TOK_SUBQUERY", "TOK_SUBQUERY_EXPR", "TOK_SUBQUERY_OP", "TOK_SUBQUERY_OP_NOTEXISTS", "TOK_SUBQUERY_OP_NOTIN", "TOK_SWITCHDATABASE", "TOK_TAB", "TOK_TABALIAS", "TOK_TABCOL", "TOK_TABCOLLIST", "TOK_TABCOLNAME", "TOK_TABCOLVALUE", "TOK_TABCOLVALUES", "TOK_TABCOLVALUE_PAIR", "TOK_TABLEBUCKETSAMPLE", "TOK_TABLECOMMENT", "TOK_TABLEFILEFORMAT", "TOK_TABLELOCATION", "TOK_TABLEPARTCOLS", "TOK_TABLEPROPERTIES", "TOK_TABLEPROPERTY", "TOK_TABLEPROPLIST", "TOK_TABLEROWFORMAT", "TOK_TABLEROWFORMATCOLLITEMS", "TOK_TABLEROWFORMATFIELD", "TOK_TABLEROWFORMATLINES", "TOK_TABLEROWFORMATMAPKEYS", "TOK_TABLEROWFORMATNULL", "TOK_TABLESERIALIZER", "TOK_TABLESKEWED", "TOK_TABLESPLITSAMPLE", "TOK_TABLE_OR_COL", "TOK_TABLE_PARTITION", "TOK_TABLE_TYPE", "TOK_TABNAME", "TOK_TABREF", "TOK_TABSORTCOLNAMEASC", "TOK_TABSORTCOLNAMEDESC", "TOK_TABSRC", "TOK_TABTYPE", "TOK_TEMPORARY", "TOK_TIMESTAMP", "TOK_TINYINT", "TOK_TMP_FILE", "TOK_TRANSFORM", "TOK_TRUE", "TOK_TRUNCATETABLE", "TOK_UNION", "TOK_UNIONTYPE", "TOK_UNIQUEJOIN", "TOK_UNLOCKDB", "TOK_UNLOCKTABLE", "TOK_UPDATE_TABLE", "TOK_USER", "TOK_USERSCRIPTCOLNAMES", "TOK_USERSCRIPTCOLSCHEMA", "TOK_VALUES_TABLE", "TOK_VALUE_ROW", "TOK_VARCHAR", "TOK_VIEWPARTCOLS", "TOK_VIRTUAL_TABLE", "TOK_VIRTUAL_TABREF", "TOK_WHERE", "TOK_WINDOWDEF", "TOK_WINDOWRANGE", "TOK_WINDOWSPEC", "TOK_WINDOWVALUES", "891"
    };

    public static final int EOF=-1;
    public static final int AMPERSAND=4;
    public static final int BITWISEOR=5;
    public static final int BITWISEXOR=6;
    public static final int BigintLiteral=7;
    public static final int ByteLengthLiteral=8;
    public static final int COLON=9;
    public static final int COMMA=10;
    public static final int COMMENT=11;
    public static final int CharSetLiteral=12;
    public static final int CharSetName=13;
    public static final int DIV=14;
    public static final int DIVIDE=15;
    public static final int DOLLAR=16;
    public static final int DOT=17;
    public static final int DecimalLiteral=18;
    public static final int Digit=19;
    public static final int EQUAL=20;
    public static final int EQUAL_NS=21;
    public static final int Exponent=22;
    public static final int GREATERTHAN=23;
    public static final int GREATERTHANOREQUALTO=24;
    public static final int HexDigit=25;
    public static final int Identifier=26;
    public static final int KW_ADD=27;
    public static final int KW_ADMIN=28;
    public static final int KW_AFTER=29;
    public static final int KW_ALL=30;
    public static final int KW_ALTER=31;
    public static final int KW_ANALYZE=32;
    public static final int KW_AND=33;
    public static final int KW_ARCHIVE=34;
    public static final int KW_ARRAY=35;
    public static final int KW_AS=36;
    public static final int KW_ASC=37;
    public static final int KW_AUTHORIZATION=38;
    public static final int KW_BEFORE=39;
    public static final int KW_BETWEEN=40;
    public static final int KW_BIGINT=41;
    public static final int KW_BINARY=42;
    public static final int KW_BOOLEAN=43;
    public static final int KW_BOTH=44;
    public static final int KW_BUCKET=45;
    public static final int KW_BUCKETS=46;
    public static final int KW_BY=47;
    public static final int KW_CASCADE=48;
    public static final int KW_CASE=49;
    public static final int KW_CAST=50;
    public static final int KW_CHANGE=51;
    public static final int KW_CHAR=52;
    public static final int KW_CLUSTER=53;
    public static final int KW_CLUSTERED=54;
    public static final int KW_CLUSTERSTATUS=55;
    public static final int KW_COLLECTION=56;
    public static final int KW_COLUMN=57;
    public static final int KW_COLUMNS=58;
    public static final int KW_COMMENT=59;
    public static final int KW_COMPACT=60;
    public static final int KW_COMPACTIONS=61;
    public static final int KW_COMPUTE=62;
    public static final int KW_CONCATENATE=63;
    public static final int KW_CONF=64;
    public static final int KW_CONTINUE=65;
    public static final int KW_CREATE=66;
    public static final int KW_CROSS=67;
    public static final int KW_CUBE=68;
    public static final int KW_CURRENT=69;
    public static final int KW_CURSOR=70;
    public static final int KW_DATA=71;
    public static final int KW_DATABASE=72;
    public static final int KW_DATABASES=73;
    public static final int KW_DATE=74;
    public static final int KW_DATETIME=75;
    public static final int KW_DBPROPERTIES=76;
    public static final int KW_DECIMAL=77;
    public static final int KW_DEFAULT=78;
    public static final int KW_DEFERRED=79;
    public static final int KW_DEFINED=80;
    public static final int KW_DELETE=81;
    public static final int KW_DELIMITED=82;
    public static final int KW_DEPENDENCY=83;
    public static final int KW_DESC=84;
    public static final int KW_DESCRIBE=85;
    public static final int KW_DIRECTORIES=86;
    public static final int KW_DIRECTORY=87;
    public static final int KW_DISABLE=88;
    public static final int KW_DISTINCT=89;
    public static final int KW_DISTRIBUTE=90;
    public static final int KW_DOUBLE=91;
    public static final int KW_DROP=92;
    public static final int KW_ELEM_TYPE=93;
    public static final int KW_ELSE=94;
    public static final int KW_ENABLE=95;
    public static final int KW_END=96;
    public static final int KW_ESCAPED=97;
    public static final int KW_EXCHANGE=98;
    public static final int KW_EXCLUSIVE=99;
    public static final int KW_EXISTS=100;
    public static final int KW_EXPLAIN=101;
    public static final int KW_EXPORT=102;
    public static final int KW_EXTENDED=103;
    public static final int KW_EXTERNAL=104;
    public static final int KW_FALSE=105;
    public static final int KW_FETCH=106;
    public static final int KW_FIELDS=107;
    public static final int KW_FILE=108;
    public static final int KW_FILEFORMAT=109;
    public static final int KW_FIRST=110;
    public static final int KW_FLOAT=111;
    public static final int KW_FOLLOWING=112;
    public static final int KW_FOR=113;
    public static final int KW_FORMAT=114;
    public static final int KW_FORMATTED=115;
    public static final int KW_FROM=116;
    public static final int KW_FULL=117;
    public static final int KW_FUNCTION=118;
    public static final int KW_FUNCTIONS=119;
    public static final int KW_GRANT=120;
    public static final int KW_GROUP=121;
    public static final int KW_GROUPING=122;
    public static final int KW_HAVING=123;
    public static final int KW_HOLD_DDLTIME=124;
    public static final int KW_IDXPROPERTIES=125;
    public static final int KW_IF=126;
    public static final int KW_IGNORE=127;
    public static final int KW_IMPORT=128;
    public static final int KW_IN=129;
    public static final int KW_INDEX=130;
    public static final int KW_INDEXES=131;
    public static final int KW_INNER=132;
    public static final int KW_INPATH=133;
    public static final int KW_INPUTDRIVER=134;
    public static final int KW_INPUTFORMAT=135;
    public static final int KW_INSERT=136;
    public static final int KW_INT=137;
    public static final int KW_INTERSECT=138;
    public static final int KW_INTO=139;
    public static final int KW_IS=140;
    public static final int KW_ITEMS=141;
    public static final int KW_JAR=142;
    public static final int KW_JOIN=143;
    public static final int KW_KEYS=144;
    public static final int KW_KEY_TYPE=145;
    public static final int KW_LATERAL=146;
    public static final int KW_LEFT=147;
    public static final int KW_LESS=148;
    public static final int KW_LIKE=149;
    public static final int KW_LIMIT=150;
    public static final int KW_LINES=151;
    public static final int KW_LOAD=152;
    public static final int KW_LOCAL=153;
    public static final int KW_LOCATION=154;
    public static final int KW_LOCK=155;
    public static final int KW_LOCKS=156;
    public static final int KW_LOGICAL=157;
    public static final int KW_LONG=158;
    public static final int KW_MACRO=159;
    public static final int KW_MAP=160;
    public static final int KW_MAPJOIN=161;
    public static final int KW_MATERIALIZED=162;
    public static final int KW_MINUS=163;
    public static final int KW_MORE=164;
    public static final int KW_MSCK=165;
    public static final int KW_NONE=166;
    public static final int KW_NOSCAN=167;
    public static final int KW_NOT=168;
    public static final int KW_NO_DROP=169;
    public static final int KW_NULL=170;
    public static final int KW_OF=171;
    public static final int KW_OFFLINE=172;
    public static final int KW_ON=173;
    public static final int KW_OPTION=174;
    public static final int KW_OR=175;
    public static final int KW_ORDER=176;
    public static final int KW_OUT=177;
    public static final int KW_OUTER=178;
    public static final int KW_OUTPUTDRIVER=179;
    public static final int KW_OUTPUTFORMAT=180;
    public static final int KW_OVER=181;
    public static final int KW_OVERWRITE=182;
    public static final int KW_OWNER=183;
    public static final int KW_PARTIALSCAN=184;
    public static final int KW_PARTITION=185;
    public static final int KW_PARTITIONED=186;
    public static final int KW_PARTITIONS=187;
    public static final int KW_PERCENT=188;
    public static final int KW_PLUS=189;
    public static final int KW_PRECEDING=190;
    public static final int KW_PRESERVE=191;
    public static final int KW_PRETTY=192;
    public static final int KW_PRINCIPALS=193;
    public static final int KW_PROCEDURE=194;
    public static final int KW_PROTECTION=195;
    public static final int KW_PURGE=196;
    public static final int KW_RANGE=197;
    public static final int KW_READ=198;
    public static final int KW_READONLY=199;
    public static final int KW_READS=200;
    public static final int KW_REBUILD=201;
    public static final int KW_RECORDREADER=202;
    public static final int KW_RECORDWRITER=203;
    public static final int KW_REDUCE=204;
    public static final int KW_REGEXP=205;
    public static final int KW_RENAME=206;
    public static final int KW_REPAIR=207;
    public static final int KW_REPLACE=208;
    public static final int KW_RESTRICT=209;
    public static final int KW_REVOKE=210;
    public static final int KW_REWRITE=211;
    public static final int KW_RIGHT=212;
    public static final int KW_RLIKE=213;
    public static final int KW_ROLE=214;
    public static final int KW_ROLES=215;
    public static final int KW_ROLLUP=216;
    public static final int KW_ROW=217;
    public static final int KW_ROWS=218;
    public static final int KW_SCHEMA=219;
    public static final int KW_SCHEMAS=220;
    public static final int KW_SELECT=221;
    public static final int KW_SEMI=222;
    public static final int KW_SERDE=223;
    public static final int KW_SERDEPROPERTIES=224;
    public static final int KW_SET=225;
    public static final int KW_SETS=226;
    public static final int KW_SHARED=227;
    public static final int KW_SHOW=228;
    public static final int KW_SHOW_DATABASE=229;
    public static final int KW_SKEWED=230;
    public static final int KW_SMALLINT=231;
    public static final int KW_SORT=232;
    public static final int KW_SORTED=233;
    public static final int KW_SSL=234;
    public static final int KW_STATISTICS=235;
    public static final int KW_STORED=236;
    public static final int KW_STREAMTABLE=237;
    public static final int KW_STRING=238;
    public static final int KW_STRUCT=239;
    public static final int KW_TABLE=240;
    public static final int KW_TABLES=241;
    public static final int KW_TABLESAMPLE=242;
    public static final int KW_TBLPROPERTIES=243;
    public static final int KW_TEMPORARY=244;
    public static final int KW_TERMINATED=245;
    public static final int KW_THEN=246;
    public static final int KW_TIMESTAMP=247;
    public static final int KW_TINYINT=248;
    public static final int KW_TO=249;
    public static final int KW_TOUCH=250;
    public static final int KW_TRANSACTIONS=251;
    public static final int KW_TRANSFORM=252;
    public static final int KW_TRIGGER=253;
    public static final int KW_TRUE=254;
    public static final int KW_TRUNCATE=255;
    public static final int KW_UNARCHIVE=256;
    public static final int KW_UNBOUNDED=257;
    public static final int KW_UNDO=258;
    public static final int KW_UNION=259;
    public static final int KW_UNIONTYPE=260;
    public static final int KW_UNIQUEJOIN=261;
    public static final int KW_UNLOCK=262;
    public static final int KW_UNSET=263;
    public static final int KW_UNSIGNED=264;
    public static final int KW_UPDATE=265;
    public static final int KW_USE=266;
    public static final int KW_USER=267;
    public static final int KW_USING=268;
    public static final int KW_UTC=269;
    public static final int KW_UTCTIMESTAMP=270;
    public static final int KW_VALUES=271;
    public static final int KW_VALUE_TYPE=272;
    public static final int KW_VARCHAR=273;
    public static final int KW_VIEW=274;
    public static final int KW_WHEN=275;
    public static final int KW_WHERE=276;
    public static final int KW_WHILE=277;
    public static final int KW_WINDOW=278;
    public static final int KW_WITH=279;
    public static final int LCURLY=280;
    public static final int LESSTHAN=281;
    public static final int LESSTHANOREQUALTO=282;
    public static final int LPAREN=283;
    public static final int LSQUARE=284;
    public static final int Letter=285;
    public static final int MINUS=286;
    public static final int MOD=287;
    public static final int NOTEQUAL=288;
    public static final int Number=289;
    public static final int PLUS=290;
    public static final int QUESTION=291;
    public static final int QuotedIdentifier=292;
    public static final int RCURLY=293;
    public static final int RPAREN=294;
    public static final int RSQUARE=295;
    public static final int RegexComponent=296;
    public static final int SEMICOLON=297;
    public static final int STAR=298;
    public static final int SmallintLiteral=299;
    public static final int StringLiteral=300;
    public static final int TILDE=301;
    public static final int TinyintLiteral=302;
    public static final int WS=303;
    public static final int TOK_ADMIN_OPTION_FOR=578;
    public static final int TOK_ALIASLIST=579;
    public static final int TOK_ALLCOLREF=580;
    public static final int TOK_ALTERDATABASE_OWNER=581;
    public static final int TOK_ALTERDATABASE_PROPERTIES=582;
    public static final int TOK_ALTERINDEX_PROPERTIES=583;
    public static final int TOK_ALTERINDEX_REBUILD=584;
    public static final int TOK_ALTERTABLE=585;
    public static final int TOK_ALTERTABLE_ADDCOLS=586;
    public static final int TOK_ALTERTABLE_ADDPARTS=587;
    public static final int TOK_ALTERTABLE_ARCHIVE=588;
    public static final int TOK_ALTERTABLE_BUCKETS=589;
    public static final int TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION=590;
    public static final int TOK_ALTERTABLE_CLUSTER_SORT=591;
    public static final int TOK_ALTERTABLE_COMPACT=592;
    public static final int TOK_ALTERTABLE_DROPPARTS=593;
    public static final int TOK_ALTERTABLE_DROPPROPERTIES=594;
    public static final int TOK_ALTERTABLE_EXCHANGEPARTITION=595;
    public static final int TOK_ALTERTABLE_FILEFORMAT=596;
    public static final int TOK_ALTERTABLE_LOCATION=597;
    public static final int TOK_ALTERTABLE_MERGEFILES=598;
    public static final int TOK_ALTERTABLE_PARTCOLTYPE=599;
    public static final int TOK_ALTERTABLE_PROPERTIES=600;
    public static final int TOK_ALTERTABLE_PROTECTMODE=601;
    public static final int TOK_ALTERTABLE_RENAME=602;
    public static final int TOK_ALTERTABLE_RENAMECOL=603;
    public static final int TOK_ALTERTABLE_RENAMEPART=604;
    public static final int TOK_ALTERTABLE_REPLACECOLS=605;
    public static final int TOK_ALTERTABLE_SERDEPROPERTIES=606;
    public static final int TOK_ALTERTABLE_SERIALIZER=607;
    public static final int TOK_ALTERTABLE_SKEWED=608;
    public static final int TOK_ALTERTABLE_SKEWED_LOCATION=609;
    public static final int TOK_ALTERTABLE_TOUCH=610;
    public static final int TOK_ALTERTABLE_UNARCHIVE=611;
    public static final int TOK_ALTERTABLE_UPDATECOLSTATS=612;
    public static final int TOK_ALTERVIEW=613;
    public static final int TOK_ALTERVIEW_ADDPARTS=614;
    public static final int TOK_ALTERVIEW_DROPPARTS=615;
    public static final int TOK_ALTERVIEW_DROPPROPERTIES=616;
    public static final int TOK_ALTERVIEW_PROPERTIES=617;
    public static final int TOK_ALTERVIEW_RENAME=618;
    public static final int TOK_ANALYZE=619;
    public static final int TOK_ANONYMOUS=620;
    public static final int TOK_ARCHIVE=621;
    public static final int TOK_BIGINT=622;
    public static final int TOK_BINARY=623;
    public static final int TOK_BOOLEAN=624;
    public static final int TOK_CASCADE=625;
    public static final int TOK_CHAR=626;
    public static final int TOK_CHARSETLITERAL=627;
    public static final int TOK_CLUSTERBY=628;
    public static final int TOK_COLTYPELIST=629;
    public static final int TOK_COL_NAME=630;
    public static final int TOK_CREATEDATABASE=631;
    public static final int TOK_CREATEFUNCTION=632;
    public static final int TOK_CREATEINDEX=633;
    public static final int TOK_CREATEINDEX_INDEXTBLNAME=634;
    public static final int TOK_CREATEMACRO=635;
    public static final int TOK_CREATEROLE=636;
    public static final int TOK_CREATETABLE=637;
    public static final int TOK_CREATEVIEW=638;
    public static final int TOK_CROSSJOIN=639;
    public static final int TOK_CTE=640;
    public static final int TOK_CUBE_GROUPBY=641;
    public static final int TOK_DATABASECOMMENT=642;
    public static final int TOK_DATABASELOCATION=643;
    public static final int TOK_DATABASEPROPERTIES=644;
    public static final int TOK_DATE=645;
    public static final int TOK_DATELITERAL=646;
    public static final int TOK_DATETIME=647;
    public static final int TOK_DBPROPLIST=648;
    public static final int TOK_DB_TYPE=649;
    public static final int TOK_DECIMAL=650;
    public static final int TOK_DEFERRED_REBUILDINDEX=651;
    public static final int TOK_DELETE_FROM=652;
    public static final int TOK_DESCDATABASE=653;
    public static final int TOK_DESCFUNCTION=654;
    public static final int TOK_DESCTABLE=655;
    public static final int TOK_DESTINATION=656;
    public static final int TOK_DIR=657;
    public static final int TOK_DISABLE=658;
    public static final int TOK_DISTRIBUTEBY=659;
    public static final int TOK_DOUBLE=660;
    public static final int TOK_DROPDATABASE=661;
    public static final int TOK_DROPFUNCTION=662;
    public static final int TOK_DROPINDEX=663;
    public static final int TOK_DROPMACRO=664;
    public static final int TOK_DROPROLE=665;
    public static final int TOK_DROPTABLE=666;
    public static final int TOK_DROPVIEW=667;
    public static final int TOK_ENABLE=668;
    public static final int TOK_EXPLAIN=669;
    public static final int TOK_EXPLAIN_SQ_REWRITE=670;
    public static final int TOK_EXPLIST=671;
    public static final int TOK_EXPORT=672;
    public static final int TOK_FALSE=673;
    public static final int TOK_FILE=674;
    public static final int TOK_FILEFORMAT_GENERIC=675;
    public static final int TOK_FLOAT=676;
    public static final int TOK_FROM=677;
    public static final int TOK_FULLOUTERJOIN=678;
    public static final int TOK_FUNCTION=679;
    public static final int TOK_FUNCTIONDI=680;
    public static final int TOK_FUNCTIONSTAR=681;
    public static final int TOK_GRANT=682;
    public static final int TOK_GRANT_OPTION_FOR=683;
    public static final int TOK_GRANT_ROLE=684;
    public static final int TOK_GRANT_WITH_ADMIN_OPTION=685;
    public static final int TOK_GRANT_WITH_OPTION=686;
    public static final int TOK_GROUP=687;
    public static final int TOK_GROUPBY=688;
    public static final int TOK_GROUPING_SETS=689;
    public static final int TOK_GROUPING_SETS_EXPRESSION=690;
    public static final int TOK_HAVING=691;
    public static final int TOK_HINT=692;
    public static final int TOK_HINTARGLIST=693;
    public static final int TOK_HINTLIST=694;
    public static final int TOK_HOLD_DDLTIME=695;
    public static final int TOK_IFEXISTS=696;
    public static final int TOK_IFNOTEXISTS=697;
    public static final int TOK_IGNOREPROTECTION=698;
    public static final int TOK_IMPORT=699;
    public static final int TOK_INDEXCOMMENT=700;
    public static final int TOK_INDEXPROPERTIES=701;
    public static final int TOK_INDEXPROPLIST=702;
    public static final int TOK_INSERT=703;
    public static final int TOK_INSERT_INTO=704;
    public static final int TOK_INT=705;
    public static final int TOK_ISNOTNULL=706;
    public static final int TOK_ISNULL=707;
    public static final int TOK_JAR=708;
    public static final int TOK_JOIN=709;
    public static final int TOK_LATERAL_VIEW=710;
    public static final int TOK_LATERAL_VIEW_OUTER=711;
    public static final int TOK_LEFTOUTERJOIN=712;
    public static final int TOK_LEFTSEMIJOIN=713;
    public static final int TOK_LENGTH=714;
    public static final int TOK_LIKETABLE=715;
    public static final int TOK_LIMIT=716;
    public static final int TOK_LIST=717;
    public static final int TOK_LOAD=718;
    public static final int TOK_LOCAL_DIR=719;
    public static final int TOK_LOCKDB=720;
    public static final int TOK_LOCKTABLE=721;
    public static final int TOK_MAP=722;
    public static final int TOK_MAPJOIN=723;
    public static final int TOK_MSCK=724;
    public static final int TOK_NOT_CLUSTERED=725;
    public static final int TOK_NOT_SORTED=726;
    public static final int TOK_NO_DROP=727;
    public static final int TOK_NULL=728;
    public static final int TOK_OFFLINE=729;
    public static final int TOK_OP_ADD=730;
    public static final int TOK_OP_AND=731;
    public static final int TOK_OP_BITAND=732;
    public static final int TOK_OP_BITNOT=733;
    public static final int TOK_OP_BITOR=734;
    public static final int TOK_OP_BITXOR=735;
    public static final int TOK_OP_DIV=736;
    public static final int TOK_OP_EQ=737;
    public static final int TOK_OP_GE=738;
    public static final int TOK_OP_GT=739;
    public static final int TOK_OP_LE=740;
    public static final int TOK_OP_LIKE=741;
    public static final int TOK_OP_LT=742;
    public static final int TOK_OP_MOD=743;
    public static final int TOK_OP_MUL=744;
    public static final int TOK_OP_NE=745;
    public static final int TOK_OP_NOT=746;
    public static final int TOK_OP_OR=747;
    public static final int TOK_OP_SUB=748;
    public static final int TOK_ORDERBY=749;
    public static final int TOK_ORREPLACE=750;
    public static final int TOK_PARTITIONINGSPEC=751;
    public static final int TOK_PARTITIONLOCATION=752;
    public static final int TOK_PARTSPEC=753;
    public static final int TOK_PARTVAL=754;
    public static final int TOK_PERCENT=755;
    public static final int TOK_PRINCIPAL_NAME=756;
    public static final int TOK_PRIVILEGE=757;
    public static final int TOK_PRIVILEGE_LIST=758;
    public static final int TOK_PRIV_ALL=759;
    public static final int TOK_PRIV_ALTER_DATA=760;
    public static final int TOK_PRIV_ALTER_METADATA=761;
    public static final int TOK_PRIV_CREATE=762;
    public static final int TOK_PRIV_DELETE=763;
    public static final int TOK_PRIV_DROP=764;
    public static final int TOK_PRIV_INDEX=765;
    public static final int TOK_PRIV_INSERT=766;
    public static final int TOK_PRIV_LOCK=767;
    public static final int TOK_PRIV_OBJECT=768;
    public static final int TOK_PRIV_OBJECT_COL=769;
    public static final int TOK_PRIV_SELECT=770;
    public static final int TOK_PRIV_SHOW_DATABASE=771;
    public static final int TOK_PTBLFUNCTION=772;
    public static final int TOK_QUERY=773;
    public static final int TOK_READONLY=774;
    public static final int TOK_RECORDREADER=775;
    public static final int TOK_RECORDWRITER=776;
    public static final int TOK_RESOURCE_ALL=777;
    public static final int TOK_RESOURCE_LIST=778;
    public static final int TOK_RESOURCE_URI=779;
    public static final int TOK_RESTRICT=780;
    public static final int TOK_REVOKE=781;
    public static final int TOK_REVOKE_ROLE=782;
    public static final int TOK_RIGHTOUTERJOIN=783;
    public static final int TOK_ROLE=784;
    public static final int TOK_ROLLUP_GROUPBY=785;
    public static final int TOK_ROWCOUNT=786;
    public static final int TOK_SELECT=787;
    public static final int TOK_SELECTDI=788;
    public static final int TOK_SELEXPR=789;
    public static final int TOK_SERDE=790;
    public static final int TOK_SERDENAME=791;
    public static final int TOK_SERDEPROPS=792;
    public static final int TOK_SET_COLUMNS_CLAUSE=793;
    public static final int TOK_SHOWCOLUMNS=794;
    public static final int TOK_SHOWCONF=795;
    public static final int TOK_SHOWDATABASES=796;
    public static final int TOK_SHOWDBLOCKS=797;
    public static final int TOK_SHOWFUNCTIONS=798;
    public static final int TOK_SHOWINDEXES=799;
    public static final int TOK_SHOWLOCKS=800;
    public static final int TOK_SHOWPARTITIONS=801;
    public static final int TOK_SHOWTABLES=802;
    public static final int TOK_SHOW_COMPACTIONS=803;
    public static final int TOK_SHOW_CREATETABLE=804;
    public static final int TOK_SHOW_GRANT=805;
    public static final int TOK_SHOW_ROLES=806;
    public static final int TOK_SHOW_ROLE_GRANT=807;
    public static final int TOK_SHOW_ROLE_PRINCIPALS=808;
    public static final int TOK_SHOW_SET_ROLE=809;
    public static final int TOK_SHOW_TABLESTATUS=810;
    public static final int TOK_SHOW_TBLPROPERTIES=811;
    public static final int TOK_SHOW_TRANSACTIONS=812;
    public static final int TOK_SKEWED_LOCATIONS=813;
    public static final int TOK_SKEWED_LOCATION_LIST=814;
    public static final int TOK_SKEWED_LOCATION_MAP=815;
    public static final int TOK_SMALLINT=816;
    public static final int TOK_SORTBY=817;
    public static final int TOK_STORAGEHANDLER=818;
    public static final int TOK_STOREDASDIRS=819;
    public static final int TOK_STREAMTABLE=820;
    public static final int TOK_STRING=821;
    public static final int TOK_STRINGLITERALSEQUENCE=822;
    public static final int TOK_STRUCT=823;
    public static final int TOK_SUBQUERY=824;
    public static final int TOK_SUBQUERY_EXPR=825;
    public static final int TOK_SUBQUERY_OP=826;
    public static final int TOK_SUBQUERY_OP_NOTEXISTS=827;
    public static final int TOK_SUBQUERY_OP_NOTIN=828;
    public static final int TOK_SWITCHDATABASE=829;
    public static final int TOK_TAB=830;
    public static final int TOK_TABALIAS=831;
    public static final int TOK_TABCOL=832;
    public static final int TOK_TABCOLLIST=833;
    public static final int TOK_TABCOLNAME=834;
    public static final int TOK_TABCOLVALUE=835;
    public static final int TOK_TABCOLVALUES=836;
    public static final int TOK_TABCOLVALUE_PAIR=837;
    public static final int TOK_TABLEBUCKETSAMPLE=838;
    public static final int TOK_TABLECOMMENT=839;
    public static final int TOK_TABLEFILEFORMAT=840;
    public static final int TOK_TABLELOCATION=841;
    public static final int TOK_TABLEPARTCOLS=842;
    public static final int TOK_TABLEPROPERTIES=843;
    public static final int TOK_TABLEPROPERTY=844;
    public static final int TOK_TABLEPROPLIST=845;
    public static final int TOK_TABLEROWFORMAT=846;
    public static final int TOK_TABLEROWFORMATCOLLITEMS=847;
    public static final int TOK_TABLEROWFORMATFIELD=848;
    public static final int TOK_TABLEROWFORMATLINES=849;
    public static final int TOK_TABLEROWFORMATMAPKEYS=850;
    public static final int TOK_TABLEROWFORMATNULL=851;
    public static final int TOK_TABLESERIALIZER=852;
    public static final int TOK_TABLESKEWED=853;
    public static final int TOK_TABLESPLITSAMPLE=854;
    public static final int TOK_TABLE_OR_COL=855;
    public static final int TOK_TABLE_PARTITION=856;
    public static final int TOK_TABLE_TYPE=857;
    public static final int TOK_TABNAME=858;
    public static final int TOK_TABREF=859;
    public static final int TOK_TABSORTCOLNAMEASC=860;
    public static final int TOK_TABSORTCOLNAMEDESC=861;
    public static final int TOK_TABSRC=862;
    public static final int TOK_TABTYPE=863;
    public static final int TOK_TEMPORARY=864;
    public static final int TOK_TIMESTAMP=865;
    public static final int TOK_TINYINT=866;
    public static final int TOK_TMP_FILE=867;
    public static final int TOK_TRANSFORM=868;
    public static final int TOK_TRUE=869;
    public static final int TOK_TRUNCATETABLE=870;
    public static final int TOK_UNION=871;
    public static final int TOK_UNIONTYPE=872;
    public static final int TOK_UNIQUEJOIN=873;
    public static final int TOK_UNLOCKDB=874;
    public static final int TOK_UNLOCKTABLE=875;
    public static final int TOK_UPDATE_TABLE=876;
    public static final int TOK_USER=877;
    public static final int TOK_USERSCRIPTCOLNAMES=878;
    public static final int TOK_USERSCRIPTCOLSCHEMA=879;
    public static final int TOK_VALUES_TABLE=880;
    public static final int TOK_VALUE_ROW=881;
    public static final int TOK_VARCHAR=882;
    public static final int TOK_VIEWPARTCOLS=883;
    public static final int TOK_VIRTUAL_TABLE=884;
    public static final int TOK_VIRTUAL_TABREF=885;
    public static final int TOK_WHERE=886;
    public static final int TOK_WINDOWDEF=887;
    public static final int TOK_WINDOWRANGE=888;
    public static final int TOK_WINDOWSPEC=889;
    public static final int TOK_WINDOWVALUES=890;

    // delegates
    public HiveParser_SelectClauseParser gSelectClauseParser;
    public HiveParser_FromClauseParser gFromClauseParser;
    public HiveParser_IdentifiersParser gIdentifiersParser;
    public Parser[] getDelegates() {
        return new Parser[] {gSelectClauseParser, gFromClauseParser, gIdentifiersParser};
    }

    // delegators


    public HiveParser(TokenStream input) {
        this(input, new RecognizerSharedState());
    }
    public HiveParser(TokenStream input, RecognizerSharedState state) {
        super(input, state);
        gSelectClauseParser = new HiveParser_SelectClauseParser(input, state, this);
        gFromClauseParser = new HiveParser_FromClauseParser(input, state, this);
        gIdentifiersParser = new HiveParser_IdentifiersParser(input, state, this);
    }
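
    // Illustrative usage only, not part of the generated grammar: this sketch assumes the
    // companion HiveLexer generated from the same grammar. Hive itself normally drives this
    // parser via ParseDriver, which also supplies a case-insensitive character stream and an
    // ASTNode tree adaptor (see setTreeAdaptor below).
    //
    //   HiveLexer lexer = new HiveLexer(new ANTLRStringStream("SELECT 1 FROM src"));
    //   CommonTokenStream tokens = new CommonTokenStream(lexer);
    //   HiveParser parser = new HiveParser(tokens);
    //   CommonTree ast = (CommonTree) parser.statement().getTree();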

protected TreeAdaptor adaptor = new CommonTreeAdaptor();

public void setTreeAdaptor(TreeAdaptor adaptor) {
    this.adaptor = adaptor;
    gSelectClauseParser.setTreeAdaptor(this.adaptor);
    gFromClauseParser.setTreeAdaptor(this.adaptor);
    gIdentifiersParser.setTreeAdaptor(this.adaptor);
}
public TreeAdaptor getTreeAdaptor() {
    return adaptor;
}
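
    // The adaptor controls which node class the parse tree is built from; the default above is
    // ANTLR's CommonTree. A caller can substitute its own node type, for example (sketch only):
    //
    //   parser.setTreeAdaptor(new CommonTreeAdaptor() {
    //       @Override
    //       public Object create(Token t) { return new ASTNode(t); }
    //   });
    //
    // which is how Hive obtains ASTNode trees rather than plain CommonTree instances.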
    public String[] getTokenNames() { return HiveParser.tokenNames; }
    public String getGrammarFileName() { return "org/apache/hadoop/hive/ql/parse/HiveParser.g"; }


      ArrayList<ParseError> errors = new ArrayList<ParseError>();
      Stack<String> msgs = new Stack<String>();

      private static HashMap<String, String> xlateMap;
      static {
        xlateMap = new HashMap<String, String>();

        // Keywords
        xlateMap.put("KW_TRUE", "TRUE");
        xlateMap.put("KW_FALSE", "FALSE");
        xlateMap.put("KW_ALL", "ALL");
        xlateMap.put("KW_NONE", "NONE");
        xlateMap.put("KW_DEFAULT", "DEFAULT");
        xlateMap.put("KW_AND", "AND");
        xlateMap.put("KW_OR", "OR");
        xlateMap.put("KW_NOT", "NOT");
        xlateMap.put("KW_LIKE", "LIKE");

        xlateMap.put("KW_ASC", "ASC");
        xlateMap.put("KW_DESC", "DESC");
        xlateMap.put("KW_ORDER", "ORDER");
        xlateMap.put("KW_BY", "BY");
        xlateMap.put("KW_GROUP", "GROUP");
        xlateMap.put("KW_WHERE", "WHERE");
        xlateMap.put("KW_FROM", "FROM");
        xlateMap.put("KW_AS", "AS");
        xlateMap.put("KW_SELECT", "SELECT");
        xlateMap.put("KW_DISTINCT", "DISTINCT");
        xlateMap.put("KW_INSERT", "INSERT");
        xlateMap.put("KW_OVERWRITE", "OVERWRITE");
        xlateMap.put("KW_OUTER", "OUTER");
        xlateMap.put("KW_JOIN", "JOIN");
        xlateMap.put("KW_LEFT", "LEFT");
        xlateMap.put("KW_RIGHT", "RIGHT");
        xlateMap.put("KW_FULL", "FULL");
        xlateMap.put("KW_ON", "ON");
        xlateMap.put("KW_PARTITION", "PARTITION");
        xlateMap.put("KW_PARTITIONS", "PARTITIONS");
        xlateMap.put("KW_TABLE", "TABLE");
        xlateMap.put("KW_TABLES", "TABLES");
        xlateMap.put("KW_TBLPROPERTIES", "TBLPROPERTIES");
        xlateMap.put("KW_SHOW", "SHOW");
        xlateMap.put("KW_MSCK", "MSCK");
        xlateMap.put("KW_DIRECTORY", "DIRECTORY");
        xlateMap.put("KW_LOCAL", "LOCAL");
        xlateMap.put("KW_TRANSFORM", "TRANSFORM");
        xlateMap.put("KW_USING", "USING");
        xlateMap.put("KW_CLUSTER", "CLUSTER");
        xlateMap.put("KW_DISTRIBUTE", "DISTRIBUTE");
        xlateMap.put("KW_SORT", "SORT");
        xlateMap.put("KW_UNION", "UNION");
        xlateMap.put("KW_LOAD", "LOAD");
        xlateMap.put("KW_DATA", "DATA");
        xlateMap.put("KW_INPATH", "INPATH");
        xlateMap.put("KW_IS", "IS");
        xlateMap.put("KW_NULL", "NULL");
        xlateMap.put("KW_CREATE", "CREATE");
        xlateMap.put("KW_EXTERNAL", "EXTERNAL");
        xlateMap.put("KW_ALTER", "ALTER");
        xlateMap.put("KW_DESCRIBE", "DESCRIBE");
        xlateMap.put("KW_DROP", "DROP");
        xlateMap.put("KW_RENAME", "RENAME");
        xlateMap.put("KW_TO", "TO");
        xlateMap.put("KW_COMMENT", "COMMENT");
        xlateMap.put("KW_BOOLEAN", "BOOLEAN");
        xlateMap.put("KW_TINYINT", "TINYINT");
        xlateMap.put("KW_SMALLINT", "SMALLINT");
        xlateMap.put("KW_INT", "INT");
        xlateMap.put("KW_BIGINT", "BIGINT");
        xlateMap.put("KW_FLOAT", "FLOAT");
        xlateMap.put("KW_DOUBLE", "DOUBLE");
        xlateMap.put("KW_DATE", "DATE");
        xlateMap.put("KW_DATETIME", "DATETIME");
        xlateMap.put("KW_TIMESTAMP", "TIMESTAMP");
        xlateMap.put("KW_STRING", "STRING");
        xlateMap.put("KW_BINARY", "BINARY");
        xlateMap.put("KW_ARRAY", "ARRAY");
        xlateMap.put("KW_MAP", "MAP");
        xlateMap.put("KW_REDUCE", "REDUCE");
        xlateMap.put("KW_PARTITIONED", "PARTITIONED");
        xlateMap.put("KW_CLUSTERED", "CLUSTERED");
        xlateMap.put("KW_SORTED", "SORTED");
        xlateMap.put("KW_INTO", "INTO");
        xlateMap.put("KW_BUCKETS", "BUCKETS");
        xlateMap.put("KW_ROW", "ROW");
        xlateMap.put("KW_FORMAT", "FORMAT");
        xlateMap.put("KW_DELIMITED", "DELIMITED");
        xlateMap.put("KW_FIELDS", "FIELDS");
        xlateMap.put("KW_TERMINATED", "TERMINATED");
        xlateMap.put("KW_COLLECTION", "COLLECTION");
        xlateMap.put("KW_ITEMS", "ITEMS");
        xlateMap.put("KW_KEYS", "KEYS");
        xlateMap.put("KW_KEY_TYPE", "$KEY$");
        xlateMap.put("KW_LINES", "LINES");
        xlateMap.put("KW_STORED", "STORED");
        xlateMap.put("KW_SEQUENCEFILE", "SEQUENCEFILE");
        xlateMap.put("KW_TEXTFILE", "TEXTFILE");
        xlateMap.put("KW_INPUTFORMAT", "INPUTFORMAT");
        xlateMap.put("KW_OUTPUTFORMAT", "OUTPUTFORMAT");
        xlateMap.put("KW_LOCATION", "LOCATION");
        xlateMap.put("KW_TABLESAMPLE", "TABLESAMPLE");
        xlateMap.put("KW_BUCKET", "BUCKET");
        xlateMap.put("KW_OUT", "OUT");
        xlateMap.put("KW_OF", "OF");
        xlateMap.put("KW_CAST", "CAST");
        xlateMap.put("KW_ADD", "ADD");
        xlateMap.put("KW_REPLACE", "REPLACE");
        xlateMap.put("KW_COLUMNS", "COLUMNS");
        xlateMap.put("KW_RLIKE", "RLIKE");
        xlateMap.put("KW_REGEXP", "REGEXP");
        xlateMap.put("KW_TEMPORARY", "TEMPORARY");
        xlateMap.put("KW_FUNCTION", "FUNCTION");
        xlateMap.put("KW_EXPLAIN", "EXPLAIN");
        xlateMap.put("KW_EXTENDED", "EXTENDED");
        xlateMap.put("KW_SERDE", "SERDE");
        xlateMap.put("KW_WITH", "WITH");
        xlateMap.put("KW_SERDEPROPERTIES", "SERDEPROPERTIES");
        xlateMap.put("KW_LIMIT", "LIMIT");
        xlateMap.put("KW_SET", "SET");
        xlateMap.put("KW_PROPERTIES", "TBLPROPERTIES");
        xlateMap.put("KW_VALUE_TYPE", "$VALUE$");
        xlateMap.put("KW_ELEM_TYPE", "$ELEM$");
        xlateMap.put("KW_DEFINED", "DEFINED");
        xlateMap.put("KW_SUBQUERY", "SUBQUERY");
        xlateMap.put("KW_REWRITE", "REWRITE");
        xlateMap.put("KW_UPDATE", "UPDATE");
        xlateMap.put("KW_VALUES", "VALUES");
        xlateMap.put("KW_PURGE", "PURGE");


        // Operators
        xlateMap.put("DOT", ".");
        xlateMap.put("COLON", ":");
        xlateMap.put("COMMA", ",");
        xlateMap.put("SEMICOLON", ");");

        xlateMap.put("LPAREN", "(");
        xlateMap.put("RPAREN", ")");
        xlateMap.put("LSQUARE", "[");
        xlateMap.put("RSQUARE", "]");

        xlateMap.put("EQUAL", "=");
        xlateMap.put("NOTEQUAL", "<>");
        xlateMap.put("EQUAL_NS", "<=>");
        xlateMap.put("LESSTHANOREQUALTO", "<=");
        xlateMap.put("LESSTHAN", "<");
        xlateMap.put("GREATERTHANOREQUALTO", ">=");
        xlateMap.put("GREATERTHAN", ">");

        xlateMap.put("DIVIDE", "/");
        xlateMap.put("PLUS", "+");
        xlateMap.put("MINUS", "-");
        xlateMap.put("STAR", "*");
        xlateMap.put("MOD", "%");

        xlateMap.put("AMPERSAND", "&");
        xlateMap.put("TILDE", "~");
        xlateMap.put("BITWISEOR", "|");
        xlateMap.put("BITWISEXOR", "^");
        xlateMap.put("CharSetLiteral", "\\'");
      }

      public static Collection<String> getKeywords() {
        return xlateMap.values();
      }

      private static String xlate(String name) {

        String ret = xlateMap.get(name);
        if (ret == null) {
          ret = name;
        }

        return ret;
      }
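
      // Example translations (illustrative): xlate("KW_SELECT") returns "SELECT",
      // xlate("GREATERTHANOREQUALTO") returns ">=", and a name with no entry in xlateMap,
      // such as "Identifier", is returned unchanged.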

      @Override
      public Object recoverFromMismatchedSet(IntStream input,
          RecognitionException re, BitSet follow) throws RecognitionException {
        throw re;
      }

      @Override
      public void displayRecognitionError(String[] tokenNames,
          RecognitionException e) {
        errors.add(new ParseError(this, e, tokenNames));
      }

      @Override
      public String getErrorHeader(RecognitionException e) {
        String header = null;
        if (e.charPositionInLine < 0 && input.LT(-1) != null) {
          Token t = input.LT(-1);
          header = "line " + t.getLine() + ":" + t.getCharPositionInLine();
        } else {
          header = super.getErrorHeader(e);
        }

        return header;
      }
      
      @Override
      public String getErrorMessage(RecognitionException e, String[] tokenNames) {
        String msg = null;

        // Translate the token names to something that the user can understand
        String[] xlateNames = new String[tokenNames.length];
        for (int i = 0; i < tokenNames.length; ++i) {
          xlateNames[i] = HiveParser.xlate(tokenNames[i]);
        }

        if (e instanceof NoViableAltException) {
          @SuppressWarnings("unused")
          NoViableAltException nvae = (NoViableAltException) e;
          // for development, can add
          // "decision=<<"+nvae.grammarDecisionDescription+">>"
          // and "(decision="+nvae.decisionNumber+") and
          // "state "+nvae.stateNumber
          msg = "cannot recognize input near"
                  + (input.LT(1) != null ? " " + getTokenErrorDisplay(input.LT(1)) : "")
                  + (input.LT(2) != null ? " " + getTokenErrorDisplay(input.LT(2)) : "")
                  + (input.LT(3) != null ? " " + getTokenErrorDisplay(input.LT(3)) : "");
        } else if (e instanceof MismatchedTokenException) {
          MismatchedTokenException mte = (MismatchedTokenException) e;
          msg = super.getErrorMessage(e, xlateNames) + (input.LT(-1) == null ? "" : " near '" + input.LT(-1).getText() + "'");
        } else if (e instanceof FailedPredicateException) {
          FailedPredicateException fpe = (FailedPredicateException) e;
          msg = "Failed to recognize predicate '" + fpe.token.getText() + "'. Failed rule: '" + fpe.ruleName + "'";
        } else {
          msg = super.getErrorMessage(e, xlateNames);
        }

        if (msgs.size() > 0) {
          msg = msg + " in " + msgs.peek();
        }
        return msg;
      }
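
      // Example (illustrative): a NoViableAltException near unexpected tokens yields a message
      // of the form
      //   cannot recognize input near 'foo' 'bar' 'baz' in explain statement
      // where 'foo' 'bar' 'baz' stand for the next three tokens and the trailing "in ..."
      // suffix is the current entry on the msgs stack maintained by pushMsg/popMsg below.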
      
      public void pushMsg(String msg, RecognizerSharedState state) {
        // ANTLR generated code does not wrap the @init code with this backtracking check,
        // even if the matching @after has it. If we have parser rules that do some lookahead
        // with syntactic predicates, this can cause the push() and pop() calls to become
        // unbalanced, so make sure both push/pop check the backtracking state.
        if (state.backtracking == 0) {
          msgs.push(msg);
        }
      }

      public void popMsg(RecognizerSharedState state) {
        if (state.backtracking == 0) {
          msgs.pop();
        }
      }
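
      // Generated rules call these in matched pairs: pushMsg(...) from a rule's @init action
      // and popMsg(...) from its @after action, so getErrorMessage() above can report which
      // clause was being parsed. For instance, explainStatement() further down pushes
      // "explain statement".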

      // counter to generate unique union aliases
      private int aliasCounter;
      
      private String generateUnionAlias() {
        return "_u" + (++aliasCounter);
      }
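
      // Successive calls return "_u1", "_u2", ...; these serve as the auto-generated union
      // aliases referenced in the comment above.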


    public static class statement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "statement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:626:1: statement : ( explainStatement EOF | execStatement EOF );
    public final HiveParser.statement_return statement() throws RecognitionException {
        HiveParser.statement_return retval = new HiveParser.statement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token EOF2=null;
        Token EOF4=null;
        HiveParser.explainStatement_return explainStatement1 =null;

        HiveParser.execStatement_return execStatement3 =null;


        CommonTree EOF2_tree=null;
        CommonTree EOF4_tree=null;

        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:627:2: ( explainStatement EOF | execStatement EOF )
            int alt1=2;
            switch ( input.LA(1) ) {
            case KW_EXPLAIN:
                {
                alt1=1;
                }
                break;
            case KW_ALTER:
            case KW_ANALYZE:
            case KW_CREATE:
            case KW_DELETE:
            case KW_DESC:
            case KW_DESCRIBE:
            case KW_DROP:
            case KW_EXPORT:
            case KW_FROM:
            case KW_GRANT:
            case KW_IMPORT:
            case KW_INSERT:
            case KW_LOAD:
            case KW_LOCK:
            case KW_MAP:
            case KW_MSCK:
            case KW_REDUCE:
            case KW_REVOKE:
            case KW_SELECT:
            case KW_SET:
            case KW_SHOW:
            case KW_TRUNCATE:
            case KW_UNLOCK:
            case KW_UPDATE:
            case KW_USE:
            case KW_WITH:
                {
                alt1=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 1, 0, input);

                throw nvae;

            }

            switch (alt1) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:627:4: explainStatement EOF
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_explainStatement_in_statement1022);
                    explainStatement1=explainStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, explainStatement1.getTree());

                    EOF2=(Token)match(input,EOF,FOLLOW_EOF_in_statement1024); 
                    EOF2_tree = 
                    (CommonTree)adaptor.create(EOF2)
                    ;
                    adaptor.addChild(root_0, EOF2_tree);


                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:628:4: execStatement EOF
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_execStatement_in_statement1029);
                    execStatement3=execStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, execStatement3.getTree());

                    EOF4=(Token)match(input,EOF,FOLLOW_EOF_in_statement1031); 
                    EOF4_tree = 
                    (CommonTree)adaptor.create(EOF4)
                    ;
                    adaptor.addChild(root_0, EOF4_tree);


                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "statement"


    public static class explainStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "explainStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:631:1: explainStatement : KW_EXPLAIN ( ( explainOption )* execStatement -> ^( TOK_EXPLAIN execStatement ( explainOption )* ) | KW_REWRITE queryStatementExpression[true] -> ^( TOK_EXPLAIN_SQ_REWRITE queryStatementExpression ) ) ;
    public final HiveParser.explainStatement_return explainStatement() throws RecognitionException {
        HiveParser.explainStatement_return retval = new HiveParser.explainStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_EXPLAIN5=null;
        Token KW_REWRITE8=null;
        HiveParser.explainOption_return explainOption6 =null;

        HiveParser.execStatement_return execStatement7 =null;

        HiveParser.queryStatementExpression_return queryStatementExpression9 =null;


        CommonTree KW_EXPLAIN5_tree=null;
        CommonTree KW_REWRITE8_tree=null;
        RewriteRuleTokenStream stream_KW_REWRITE=new RewriteRuleTokenStream(adaptor,"token KW_REWRITE");
        RewriteRuleTokenStream stream_KW_EXPLAIN=new RewriteRuleTokenStream(adaptor,"token KW_EXPLAIN");
        RewriteRuleSubtreeStream stream_execStatement=new RewriteRuleSubtreeStream(adaptor,"rule execStatement");
        RewriteRuleSubtreeStream stream_explainOption=new RewriteRuleSubtreeStream(adaptor,"rule explainOption");
        RewriteRuleSubtreeStream stream_queryStatementExpression=new RewriteRuleSubtreeStream(adaptor,"rule queryStatementExpression");
         pushMsg("explain statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:634:2: ( KW_EXPLAIN ( ( explainOption )* execStatement -> ^( TOK_EXPLAIN execStatement ( explainOption )* ) | KW_REWRITE queryStatementExpression[true] -> ^( TOK_EXPLAIN_SQ_REWRITE queryStatementExpression ) ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:634:4: KW_EXPLAIN ( ( explainOption )* execStatement -> ^( TOK_EXPLAIN execStatement ( explainOption )* ) | KW_REWRITE queryStatementExpression[true] -> ^( TOK_EXPLAIN_SQ_REWRITE queryStatementExpression ) )
            {
            KW_EXPLAIN5=(Token)match(input,KW_EXPLAIN,FOLLOW_KW_EXPLAIN_in_explainStatement1052);  
            stream_KW_EXPLAIN.add(KW_EXPLAIN5);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:634:15: ( ( explainOption )* execStatement -> ^( TOK_EXPLAIN execStatement ( explainOption )* ) | KW_REWRITE queryStatementExpression[true] -> ^( TOK_EXPLAIN_SQ_REWRITE queryStatementExpression ) )
            int alt3=2;
            switch ( input.LA(1) ) {
            case KW_ALTER:
            case KW_ANALYZE:
            case KW_AUTHORIZATION:
            case KW_CREATE:
            case KW_DELETE:
            case KW_DEPENDENCY:
            case KW_DESC:
            case KW_DESCRIBE:
            case KW_DROP:
            case KW_EXPORT:
            case KW_EXTENDED:
            case KW_FORMATTED:
            case KW_FROM:
            case KW_GRANT:
            case KW_IMPORT:
            case KW_INSERT:
            case KW_LOAD:
            case KW_LOCK:
            case KW_LOGICAL:
            case KW_MAP:
            case KW_MSCK:
            case KW_REDUCE:
            case KW_REVOKE:
            case KW_SELECT:
            case KW_SET:
            case KW_SHOW:
            case KW_TRUNCATE:
            case KW_UNLOCK:
            case KW_UPDATE:
            case KW_USE:
            case KW_WITH:
                {
                alt3=1;
                }
                break;
            case KW_REWRITE:
                {
                alt3=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 3, 0, input);

                throw nvae;

            }

            switch (alt3) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:635:6: ( explainOption )* execStatement
                    {
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:635:6: ( explainOption )*
                    loop2:
                    do {
                        int alt2=2;
                        switch ( input.LA(1) ) {
                        case KW_AUTHORIZATION:
                        case KW_DEPENDENCY:
                        case KW_EXTENDED:
                        case KW_FORMATTED:
                        case KW_LOGICAL:
                            {
                            alt2=1;
                            }
                            break;

                        }

                        switch (alt2) {
                            case 1 :
                                // org/apache/hadoop/hive/ql/parse/HiveParser.g:635:6: explainOption
                                {
                                pushFollow(FOLLOW_explainOption_in_explainStatement1061);
                                explainOption6=explainOption();

                                state._fsp--;

                                stream_explainOption.add(explainOption6.getTree());

                                }
                                break;

                            default :
                                break loop2;
                        }
                    } while (true);


                    pushFollow(FOLLOW_execStatement_in_explainStatement1064);
                    execStatement7=execStatement();

                    state._fsp--;

                    stream_execStatement.add(execStatement7.getTree());

                    // AST REWRITE
                    // elements: explainOption, execStatement
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
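                    // The rewrite below replays the buffered execStatement subtree and any
                    // explainOption tokens out of their RewriteRule*Stream objects into a new
                    // tree rooted at an imaginary TOK_EXPLAIN node, as described by the
                    // "-> ^( TOK_EXPLAIN execStatement ( explainOption )* )" pattern above.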
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 635:35: -> ^( TOK_EXPLAIN execStatement ( explainOption )* )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:635:38: ^( TOK_EXPLAIN execStatement ( explainOption )* )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                            (CommonTree)adaptor.create(TOK_EXPLAIN, "TOK_EXPLAIN"), root_1);

                        adaptor.addChild(root_1, stream_execStatement.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:635:66: ( explainOption )*
                        while ( stream_explainOption.hasNext() ) {
                            adaptor.addChild(root_1, stream_explainOption.nextTree());

                        }
                        stream_explainOption.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:637:9: KW_REWRITE queryStatementExpression[true]
                    {
                    KW_REWRITE8=(Token)match(input,KW_REWRITE,FOLLOW_KW_REWRITE_in_explainStatement1095);  
                    stream_KW_REWRITE.add(KW_REWRITE8);


                    pushFollow(FOLLOW_queryStatementExpression_in_explainStatement1097);
                    queryStatementExpression9=queryStatementExpression(true);

                    state._fsp--;

                    stream_queryStatementExpression.add(queryStatementExpression9.getTree());

                    // AST REWRITE
                    // elements: queryStatementExpression
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 637:51: -> ^( TOK_EXPLAIN_SQ_REWRITE queryStatementExpression )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:637:54: ^( TOK_EXPLAIN_SQ_REWRITE queryStatementExpression )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                            (CommonTree)adaptor.create(TOK_EXPLAIN_SQ_REWRITE, "TOK_EXPLAIN_SQ_REWRITE"), root_1);

                        adaptor.addChild(root_1, stream_queryStatementExpression.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }


            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "explainStatement"


    public static class explainOption_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "explainOption"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:640:1: explainOption : ( KW_EXTENDED | KW_FORMATTED | KW_DEPENDENCY | KW_LOGICAL | KW_AUTHORIZATION );
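    // Matches a single EXPLAIN modifier keyword, e.g. the EXTENDED in
    // "EXPLAIN EXTENDED SELECT ..." (sample query text assumed for illustration).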
    public final HiveParser.explainOption_return explainOption() throws RecognitionException {
        HiveParser.explainOption_return retval = new HiveParser.explainOption_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token set10=null;

        CommonTree set10_tree=null;

         msgs.push("explain option"); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:643:5: ( KW_EXTENDED | KW_FORMATTED | KW_DEPENDENCY | KW_LOGICAL | KW_AUTHORIZATION )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:
            {
            root_0 = (CommonTree)adaptor.nil();


            set10=(Token)input.LT(1);

            if ( input.LA(1)==KW_AUTHORIZATION||input.LA(1)==KW_DEPENDENCY||input.LA(1)==KW_EXTENDED||input.LA(1)==KW_FORMATTED||input.LA(1)==KW_LOGICAL ) {
                input.consume();
                adaptor.addChild(root_0, (CommonTree)adaptor.create(set10));
                state.errorRecovery=false;
            }
            else {
                MismatchedSetException mse = new MismatchedSetException(null,input);
                throw mse;
            }


            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             msgs.pop(); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "explainOption"


    public static class execStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "execStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:646:1: execStatement : ( queryStatementExpression[true] | loadStatement | exportStatement | importStatement | ddlStatement | deleteStatement | updateStatement );
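    // Dispatches on the first token of the command: SELECT/WITH/FROM/MAP/REDUCE/INSERT go to
    // queryStatementExpression, LOAD/EXPORT/IMPORT/DELETE/UPDATE to their dedicated rules, and
    // the remaining keywords (CREATE, DROP, ALTER, SHOW, ...) to ddlStatement (see the switch below).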
    public final HiveParser.execStatement_return execStatement() throws RecognitionException {
        HiveParser.execStatement_return retval = new HiveParser.execStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.queryStatementExpression_return queryStatementExpression11 =null;

        HiveParser.loadStatement_return loadStatement12 =null;

        HiveParser.exportStatement_return exportStatement13 =null;

        HiveParser.importStatement_return importStatement14 =null;

        HiveParser.ddlStatement_return ddlStatement15 =null;

        HiveParser.deleteStatement_return deleteStatement16 =null;

        HiveParser.updateStatement_return updateStatement17 =null;



         pushMsg("statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:649:5: ( queryStatementExpression[true] | loadStatement | exportStatement | importStatement | ddlStatement | deleteStatement | updateStatement )
            int alt4=7;
            switch ( input.LA(1) ) {
            case KW_FROM:
            case KW_INSERT:
            case KW_MAP:
            case KW_REDUCE:
            case KW_SELECT:
            case KW_WITH:
                {
                alt4=1;
                }
                break;
            case KW_LOAD:
                {
                alt4=2;
                }
                break;
            case KW_EXPORT:
                {
                alt4=3;
                }
                break;
            case KW_IMPORT:
                {
                alt4=4;
                }
                break;
            case KW_ALTER:
            case KW_ANALYZE:
            case KW_CREATE:
            case KW_DESC:
            case KW_DESCRIBE:
            case KW_DROP:
            case KW_GRANT:
            case KW_LOCK:
            case KW_MSCK:
            case KW_REVOKE:
            case KW_SET:
            case KW_SHOW:
            case KW_TRUNCATE:
            case KW_UNLOCK:
            case KW_USE:
                {
                alt4=5;
                }
                break;
            case KW_DELETE:
                {
                alt4=6;
                }
                break;
            case KW_UPDATE:
                {
                alt4=7;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 4, 0, input);

                throw nvae;

            }

            switch (alt4) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:649:7: queryStatementExpression[true]
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_queryStatementExpression_in_execStatement1166);
                    queryStatementExpression11=queryStatementExpression(true);

                    state._fsp--;

                    adaptor.addChild(root_0, queryStatementExpression11.getTree());

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:650:7: loadStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_loadStatement_in_execStatement1175);
                    loadStatement12=loadStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, loadStatement12.getTree());

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:651:7: exportStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_exportStatement_in_execStatement1183);
                    exportStatement13=exportStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, exportStatement13.getTree());

                    }
                    break;
                case 4 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:652:7: importStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_importStatement_in_execStatement1191);
                    importStatement14=importStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, importStatement14.getTree());

                    }
                    break;
                case 5 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:653:7: ddlStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_ddlStatement_in_execStatement1199);
                    ddlStatement15=ddlStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, ddlStatement15.getTree());

                    }
                    break;
                case 6 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:654:7: deleteStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_deleteStatement_in_execStatement1207);
                    deleteStatement16=deleteStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, deleteStatement16.getTree());

                    }
                    break;
                case 7 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:655:7: updateStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_updateStatement_in_execStatement1215);
                    updateStatement17=updateStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, updateStatement17.getTree());

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "execStatement"


    public static class loadStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "loadStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:658:1: loadStatement : KW_LOAD KW_DATA (islocal= KW_LOCAL )? KW_INPATH (path= StringLiteral ) (isoverwrite= KW_OVERWRITE )? KW_INTO KW_TABLE (tab= tableOrPartition ) -> ^( TOK_LOAD $path $tab ( $islocal)? ( $isoverwrite)? ) ;
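    // Illustrative input this rule accepts (sample path and table name assumed):
    //   LOAD DATA LOCAL INPATH '/tmp/kv1.txt' OVERWRITE INTO TABLE page_view
    // LOCAL and OVERWRITE are optional; the table reference is parsed by tableOrPartition.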
    public final HiveParser.loadStatement_return loadStatement() throws RecognitionException {
        HiveParser.loadStatement_return retval = new HiveParser.loadStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token islocal=null;
        Token path=null;
        Token isoverwrite=null;
        Token KW_LOAD18=null;
        Token KW_DATA19=null;
        Token KW_INPATH20=null;
        Token KW_INTO21=null;
        Token KW_TABLE22=null;
        HiveParser_IdentifiersParser.tableOrPartition_return tab =null;


        CommonTree islocal_tree=null;
        CommonTree path_tree=null;
        CommonTree isoverwrite_tree=null;
        CommonTree KW_LOAD18_tree=null;
        CommonTree KW_DATA19_tree=null;
        CommonTree KW_INPATH20_tree=null;
        CommonTree KW_INTO21_tree=null;
        CommonTree KW_TABLE22_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_INPATH=new RewriteRuleTokenStream(adaptor,"token KW_INPATH");
        RewriteRuleTokenStream stream_KW_INTO=new RewriteRuleTokenStream(adaptor,"token KW_INTO");
        RewriteRuleTokenStream stream_KW_LOCAL=new RewriteRuleTokenStream(adaptor,"token KW_LOCAL");
        RewriteRuleTokenStream stream_KW_OVERWRITE=new RewriteRuleTokenStream(adaptor,"token KW_OVERWRITE");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleTokenStream stream_KW_LOAD=new RewriteRuleTokenStream(adaptor,"token KW_LOAD");
        RewriteRuleTokenStream stream_KW_DATA=new RewriteRuleTokenStream(adaptor,"token KW_DATA");
        RewriteRuleSubtreeStream stream_tableOrPartition=new RewriteRuleSubtreeStream(adaptor,"rule tableOrPartition");
         pushMsg("load statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:661:5: ( KW_LOAD KW_DATA (islocal= KW_LOCAL )? KW_INPATH (path= StringLiteral ) (isoverwrite= KW_OVERWRITE )? KW_INTO KW_TABLE (tab= tableOrPartition ) -> ^( TOK_LOAD $path $tab ( $islocal)? ( $isoverwrite)? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:661:7: KW_LOAD KW_DATA (islocal= KW_LOCAL )? KW_INPATH (path= StringLiteral ) (isoverwrite= KW_OVERWRITE )? KW_INTO KW_TABLE (tab= tableOrPartition )
            {
            KW_LOAD18=(Token)match(input,KW_LOAD,FOLLOW_KW_LOAD_in_loadStatement1242);  
            stream_KW_LOAD.add(KW_LOAD18);


            KW_DATA19=(Token)match(input,KW_DATA,FOLLOW_KW_DATA_in_loadStatement1244);  
            stream_KW_DATA.add(KW_DATA19);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:661:23: (islocal= KW_LOCAL )?
            int alt5=2;
            switch ( input.LA(1) ) {
                case KW_LOCAL:
                    {
                    alt5=1;
                    }
                    break;
            }

            switch (alt5) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:661:24: islocal= KW_LOCAL
                    {
                    islocal=(Token)match(input,KW_LOCAL,FOLLOW_KW_LOCAL_in_loadStatement1249);  
                    stream_KW_LOCAL.add(islocal);


                    }
                    break;

            }


            KW_INPATH20=(Token)match(input,KW_INPATH,FOLLOW_KW_INPATH_in_loadStatement1253);  
            stream_KW_INPATH.add(KW_INPATH20);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:661:53: (path= StringLiteral )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:661:54: path= StringLiteral
            {
            path=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_loadStatement1258);  
            stream_StringLiteral.add(path);


            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:661:74: (isoverwrite= KW_OVERWRITE )?
            int alt6=2;
            switch ( input.LA(1) ) {
                case KW_OVERWRITE:
                    {
                    alt6=1;
                    }
                    break;
            }

            switch (alt6) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:661:75: isoverwrite= KW_OVERWRITE
                    {
                    isoverwrite=(Token)match(input,KW_OVERWRITE,FOLLOW_KW_OVERWRITE_in_loadStatement1264);  
                    stream_KW_OVERWRITE.add(isoverwrite);


                    }
                    break;

            }


            KW_INTO21=(Token)match(input,KW_INTO,FOLLOW_KW_INTO_in_loadStatement1268);  
            stream_KW_INTO.add(KW_INTO21);


            KW_TABLE22=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_loadStatement1270);  
            stream_KW_TABLE.add(KW_TABLE22);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:661:119: (tab= tableOrPartition )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:661:120: tab= tableOrPartition
            {
            pushFollow(FOLLOW_tableOrPartition_in_loadStatement1275);
            tab=tableOrPartition();

            state._fsp--;

            stream_tableOrPartition.add(tab.getTree());

            }


            // AST REWRITE
            // elements: path, islocal, isoverwrite, tab
            // token labels: islocal, isoverwrite, path
            // rule labels: retval, tab
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_islocal=new RewriteRuleTokenStream(adaptor,"token islocal",islocal);
            RewriteRuleTokenStream stream_isoverwrite=new RewriteRuleTokenStream(adaptor,"token isoverwrite",isoverwrite);
            RewriteRuleTokenStream stream_path=new RewriteRuleTokenStream(adaptor,"token path",path);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_tab=new RewriteRuleSubtreeStream(adaptor,"rule tab",tab!=null?tab.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 662:5: -> ^( TOK_LOAD $path $tab ( $islocal)? ( $isoverwrite)? )
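            // Illustrative result (assumed input): for "LOAD DATA LOCAL INPATH '/tmp/kv1.txt' INTO TABLE t"
            // this builds the subtree (TOK_LOAD '/tmp/kv1.txt' <tableOrPartition t> LOCAL),
            // with the optional OVERWRITE token appended only when it was matched.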
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:662:8: ^( TOK_LOAD $path $tab ( $islocal)? ( $isoverwrite)? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                    (CommonTree)adaptor.create(TOK_LOAD, "TOK_LOAD"), root_1);

                adaptor.addChild(root_1, stream_path.nextNode());

                adaptor.addChild(root_1, stream_tab.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:662:31: ( $islocal)?
                if ( stream_islocal.hasNext() ) {
                    adaptor.addChild(root_1, stream_islocal.nextNode());

                }
                stream_islocal.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:662:41: ( $isoverwrite)?
                if ( stream_isoverwrite.hasNext() ) {
                    adaptor.addChild(root_1, stream_isoverwrite.nextNode());

                }
                stream_isoverwrite.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "loadStatement"


    public static class exportStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "exportStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:665:1: exportStatement : KW_EXPORT KW_TABLE (tab= tableOrPartition ) KW_TO (path= StringLiteral ) -> ^( TOK_EXPORT $tab $path) ;
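    // Illustrative input this rule accepts (sample table name and path assumed):
    //   EXPORT TABLE page_view TO '/user/backup/page_view'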
    public final HiveParser.exportStatement_return exportStatement() throws RecognitionException {
        HiveParser.exportStatement_return retval = new HiveParser.exportStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token path=null;
        Token KW_EXPORT23=null;
        Token KW_TABLE24=null;
        Token KW_TO25=null;
        HiveParser_IdentifiersParser.tableOrPartition_return tab =null;


        CommonTree path_tree=null;
        CommonTree KW_EXPORT23_tree=null;
        CommonTree KW_TABLE24_tree=null;
        CommonTree KW_TO25_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_EXPORT=new RewriteRuleTokenStream(adaptor,"token KW_EXPORT");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleTokenStream stream_KW_TO=new RewriteRuleTokenStream(adaptor,"token KW_TO");
        RewriteRuleSubtreeStream stream_tableOrPartition=new RewriteRuleSubtreeStream(adaptor,"rule tableOrPartition");
         pushMsg("export statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:668:5: ( KW_EXPORT KW_TABLE (tab= tableOrPartition ) KW_TO (path= StringLiteral ) -> ^( TOK_EXPORT $tab $path) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:668:7: KW_EXPORT KW_TABLE (tab= tableOrPartition ) KW_TO (path= StringLiteral )
            {
            KW_EXPORT23=(Token)match(input,KW_EXPORT,FOLLOW_KW_EXPORT_in_exportStatement1327);  
            stream_KW_EXPORT.add(KW_EXPORT23);


            KW_TABLE24=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_exportStatement1329);  
            stream_KW_TABLE.add(KW_TABLE24);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:668:26: (tab= tableOrPartition )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:668:27: tab= tableOrPartition
            {
            pushFollow(FOLLOW_tableOrPartition_in_exportStatement1334);
            tab=tableOrPartition();

            state._fsp--;

            stream_tableOrPartition.add(tab.getTree());

            }


            KW_TO25=(Token)match(input,KW_TO,FOLLOW_KW_TO_in_exportStatement1337);  
            stream_KW_TO.add(KW_TO25);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:668:55: (path= StringLiteral )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:668:56: path= StringLiteral
            {
            path=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_exportStatement1342);  
            stream_StringLiteral.add(path);


            }


            // AST REWRITE
            // elements: tab, path
            // token labels: path
            // rule labels: retval, tab
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_path=new RewriteRuleTokenStream(adaptor,"token path",path);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_tab=new RewriteRuleSubtreeStream(adaptor,"rule tab",tab!=null?tab.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 669:5: -> ^( TOK_EXPORT $tab $path)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:669:8: ^( TOK_EXPORT $tab $path)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                    (CommonTree)adaptor.create(TOK_EXPORT, "TOK_EXPORT"), root_1);

                adaptor.addChild(root_1, stream_tab.nextTree());

                adaptor.addChild(root_1, stream_path.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "exportStatement"


    public static class importStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "importStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:672:1: importStatement : KW_IMPORT ( (ext= KW_EXTERNAL )? KW_TABLE (tab= tableOrPartition ) )? KW_FROM (path= StringLiteral ) ( tableLocation )? -> ^( TOK_IMPORT $path ( $tab)? ( $ext)? ( tableLocation )? ) ;
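    // Illustrative inputs this rule accepts (sample names and paths assumed):
    //   IMPORT TABLE page_view FROM '/user/backup/page_view'
    //   IMPORT EXTERNAL TABLE page_view FROM '/user/backup/page_view' LOCATION '/warehouse/page_view'
    // The EXTERNAL keyword, the TABLE clause as a whole, and the trailing tableLocation are all optional.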
    public final HiveParser.importStatement_return importStatement() throws RecognitionException {
        HiveParser.importStatement_return retval = new HiveParser.importStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token ext=null;
        Token path=null;
        Token KW_IMPORT26=null;
        Token KW_TABLE27=null;
        Token KW_FROM28=null;
        HiveParser_IdentifiersParser.tableOrPartition_return tab =null;

        HiveParser.tableLocation_return tableLocation29 =null;


        CommonTree ext_tree=null;
        CommonTree path_tree=null;
        CommonTree KW_IMPORT26_tree=null;
        CommonTree KW_TABLE27_tree=null;
        CommonTree KW_FROM28_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_IMPORT=new RewriteRuleTokenStream(adaptor,"token KW_IMPORT");
        RewriteRuleTokenStream stream_KW_EXTERNAL=new RewriteRuleTokenStream(adaptor,"token KW_EXTERNAL");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleTokenStream stream_KW_FROM=new RewriteRuleTokenStream(adaptor,"token KW_FROM");
        RewriteRuleSubtreeStream stream_tableOrPartition=new RewriteRuleSubtreeStream(adaptor,"rule tableOrPartition");
        RewriteRuleSubtreeStream stream_tableLocation=new RewriteRuleSubtreeStream(adaptor,"rule tableLocation");
         pushMsg("import statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:675:2: ( KW_IMPORT ( (ext= KW_EXTERNAL )? KW_TABLE (tab= tableOrPartition ) )? KW_FROM (path= StringLiteral ) ( tableLocation )? -> ^( TOK_IMPORT $path ( $tab)? ( $ext)? ( tableLocation )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:675:4: KW_IMPORT ( (ext= KW_EXTERNAL )? KW_TABLE (tab= tableOrPartition ) )? KW_FROM (path= StringLiteral ) ( tableLocation )?
            {
            KW_IMPORT26=(Token)match(input,KW_IMPORT,FOLLOW_KW_IMPORT_in_importStatement1383);  
            stream_KW_IMPORT.add(KW_IMPORT26);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:675:14: ( (ext= KW_EXTERNAL )? KW_TABLE (tab= tableOrPartition ) )?
            int alt8=2;
            switch ( input.LA(1) ) {
                case KW_EXTERNAL:
                case KW_TABLE:
                    {
                    alt8=1;
                    }
                    break;
            }

            switch (alt8) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:675:15: (ext= KW_EXTERNAL )? KW_TABLE (tab= tableOrPartition )
                    {
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:675:15: (ext= KW_EXTERNAL )?
                    int alt7=2;
                    switch ( input.LA(1) ) {
                        case KW_EXTERNAL:
                            {
                            alt7=1;
                            }
                            break;
                    }

                    switch (alt7) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:675:16: ext= KW_EXTERNAL
                            {
                            ext=(Token)match(input,KW_EXTERNAL,FOLLOW_KW_EXTERNAL_in_importStatement1389);  
                            stream_KW_EXTERNAL.add(ext);


                            }
                            break;

                    }


                    KW_TABLE27=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_importStatement1393);  
                    stream_KW_TABLE.add(KW_TABLE27);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:675:43: (tab= tableOrPartition )
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:675:44: tab= tableOrPartition
                    {
                    pushFollow(FOLLOW_tableOrPartition_in_importStatement1398);
                    tab=tableOrPartition();

                    state._fsp--;

                    stream_tableOrPartition.add(tab.getTree());

                    }


                    }
                    break;

            }


            KW_FROM28=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_importStatement1403);  
            stream_KW_FROM.add(KW_FROM28);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:675:76: (path= StringLiteral )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:675:77: path= StringLiteral
            {
            path=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_importStatement1408);  
            stream_StringLiteral.add(path);


            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:675:97: ( tableLocation )?
            int alt9=2;
            switch ( input.LA(1) ) {
                case KW_LOCATION:
                    {
                    alt9=1;
                    }
                    break;
            }

            switch (alt9) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:675:97: tableLocation
                    {
                    pushFollow(FOLLOW_tableLocation_in_importStatement1411);
                    tableLocation29=tableLocation();

                    state._fsp--;

                    stream_tableLocation.add(tableLocation29.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: tab, tableLocation, ext, path
            // token labels: path, ext
            // rule labels: retval, tab
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_path=new RewriteRuleTokenStream(adaptor,"token path",path);
            RewriteRuleTokenStream stream_ext=new RewriteRuleTokenStream(adaptor,"token ext",ext);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_tab=new RewriteRuleSubtreeStream(adaptor,"rule tab",tab!=null?tab.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 676:5: -> ^( TOK_IMPORT $path ( $tab)? ( $ext)? ( tableLocation )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:676:8: ^( TOK_IMPORT $path ( $tab)? ( $ext)? ( tableLocation )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                    (CommonTree)adaptor.create(TOK_IMPORT, "TOK_IMPORT"), root_1);

                adaptor.addChild(root_1, stream_path.nextNode());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:676:28: ( $tab)?
                if ( stream_tab.hasNext() ) {
                    adaptor.addChild(root_1, stream_tab.nextTree());

                }
                stream_tab.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:676:34: ( $ext)?
                if ( stream_ext.hasNext() ) {
                    adaptor.addChild(root_1, stream_ext.nextNode());

                }
                stream_ext.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:676:39: ( tableLocation )?
                if ( stream_tableLocation.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableLocation.nextTree());

                }
                stream_tableLocation.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "importStatement"


    public static class ddlStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "ddlStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:679:1: ddlStatement : ( createDatabaseStatement | switchDatabaseStatement | dropDatabaseStatement | createTableStatement | dropTableStatement | truncateTableStatement | alterStatement | descStatement | showStatement | metastoreCheck | createViewStatement | dropViewStatement | createFunctionStatement | createMacroStatement | createIndexStatement | dropIndexStatement | dropFunctionStatement | dropMacroStatement | analyzeStatement | lockStatement | unlockStatement | lockDatabase | unlockDatabase | createRoleStatement | dropRoleStatement | grantPrivileges | revokePrivileges | showGrants | showRoleGrants | showRolePrincipals | showRoles | grantRole | revokeRole | setRole | showCurrentRole );
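    // A pure dispatch rule: dfa10.predict(input) below selects among the 35 DDL alternatives
    // listed in the grammar comment above (e.g. "CREATE DATABASE db", "SHOW TABLES",
    // "ANALYZE TABLE t COMPUTE STATISTICS" -- sample statements assumed for illustration).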
    public final HiveParser.ddlStatement_return ddlStatement() throws RecognitionException {
        HiveParser.ddlStatement_return retval = new HiveParser.ddlStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.createDatabaseStatement_return createDatabaseStatement30 =null;

        HiveParser.switchDatabaseStatement_return switchDatabaseStatement31 =null;

        HiveParser.dropDatabaseStatement_return dropDatabaseStatement32 =null;

        HiveParser.createTableStatement_return createTableStatement33 =null;

        HiveParser.dropTableStatement_return dropTableStatement34 =null;

        HiveParser.truncateTableStatement_return truncateTableStatement35 =null;

        HiveParser.alterStatement_return alterStatement36 =null;

        HiveParser.descStatement_return descStatement37 =null;

        HiveParser.showStatement_return showStatement38 =null;

        HiveParser.metastoreCheck_return metastoreCheck39 =null;

        HiveParser.createViewStatement_return createViewStatement40 =null;

        HiveParser.dropViewStatement_return dropViewStatement41 =null;

        HiveParser.createFunctionStatement_return createFunctionStatement42 =null;

        HiveParser.createMacroStatement_return createMacroStatement43 =null;

        HiveParser.createIndexStatement_return createIndexStatement44 =null;

        HiveParser.dropIndexStatement_return dropIndexStatement45 =null;

        HiveParser.dropFunctionStatement_return dropFunctionStatement46 =null;

        HiveParser.dropMacroStatement_return dropMacroStatement47 =null;

        HiveParser.analyzeStatement_return analyzeStatement48 =null;

        HiveParser.lockStatement_return lockStatement49 =null;

        HiveParser.unlockStatement_return unlockStatement50 =null;

        HiveParser.lockDatabase_return lockDatabase51 =null;

        HiveParser.unlockDatabase_return unlockDatabase52 =null;

        HiveParser.createRoleStatement_return createRoleStatement53 =null;

        HiveParser.dropRoleStatement_return dropRoleStatement54 =null;

        HiveParser.grantPrivileges_return grantPrivileges55 =null;

        HiveParser.revokePrivileges_return revokePrivileges56 =null;

        HiveParser.showGrants_return showGrants57 =null;

        HiveParser.showRoleGrants_return showRoleGrants58 =null;

        HiveParser.showRolePrincipals_return showRolePrincipals59 =null;

        HiveParser.showRoles_return showRoles60 =null;

        HiveParser.grantRole_return grantRole61 =null;

        HiveParser.revokeRole_return revokeRole62 =null;

        HiveParser.setRole_return setRole63 =null;

        HiveParser.showCurrentRole_return showCurrentRole64 =null;



         pushMsg("ddl statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:682:5: ( createDatabaseStatement | switchDatabaseStatement | dropDatabaseStatement | createTableStatement | dropTableStatement | truncateTableStatement | alterStatement | descStatement | showStatement | metastoreCheck | createViewStatement | dropViewStatement | createFunctionStatement | createMacroStatement | createIndexStatement | dropIndexStatement | dropFunctionStatement | dropMacroStatement | analyzeStatement | lockStatement | unlockStatement | lockDatabase | unlockDatabase | createRoleStatement | dropRoleStatement | grantPrivileges | revokePrivileges | showGrants | showRoleGrants | showRolePrincipals | showRoles | grantRole | revokeRole | setRole | showCurrentRole )
            int alt10=35;
            alt10 = dfa10.predict(input);
            switch (alt10) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:682:7: createDatabaseStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_createDatabaseStatement_in_ddlStatement1463);
                    createDatabaseStatement30=createDatabaseStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, createDatabaseStatement30.getTree());

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:683:7: switchDatabaseStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_switchDatabaseStatement_in_ddlStatement1471);
                    switchDatabaseStatement31=switchDatabaseStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, switchDatabaseStatement31.getTree());

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:684:7: dropDatabaseStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_dropDatabaseStatement_in_ddlStatement1479);
                    dropDatabaseStatement32=dropDatabaseStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, dropDatabaseStatement32.getTree());

                    }
                    break;
                case 4 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:685:7: createTableStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_createTableStatement_in_ddlStatement1487);
                    createTableStatement33=createTableStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, createTableStatement33.getTree());

                    }
                    break;
                case 5 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:686:7: dropTableStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_dropTableStatement_in_ddlStatement1495);
                    dropTableStatement34=dropTableStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, dropTableStatement34.getTree());

                    }
                    break;
                case 6 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:687:7: truncateTableStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_truncateTableStatement_in_ddlStatement1503);
                    truncateTableStatement35=truncateTableStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, truncateTableStatement35.getTree());

                    }
                    break;
                case 7 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:688:7: alterStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatement_in_ddlStatement1511);
                    alterStatement36=alterStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatement36.getTree());

                    }
                    break;
                case 8 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:689:7: descStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_descStatement_in_ddlStatement1519);
                    descStatement37=descStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, descStatement37.getTree());

                    }
                    break;
                case 9 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:690:7: showStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_showStatement_in_ddlStatement1527);
                    showStatement38=showStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, showStatement38.getTree());

                    }
                    break;
                case 10 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:691:7: metastoreCheck
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_metastoreCheck_in_ddlStatement1535);
                    metastoreCheck39=metastoreCheck();

                    state._fsp--;

                    adaptor.addChild(root_0, metastoreCheck39.getTree());

                    }
                    break;
                case 11 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:692:7: createViewStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_createViewStatement_in_ddlStatement1543);
                    createViewStatement40=createViewStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, createViewStatement40.getTree());

                    }
                    break;
                case 12 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:693:7: dropViewStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_dropViewStatement_in_ddlStatement1551);
                    dropViewStatement41=dropViewStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, dropViewStatement41.getTree());

                    }
                    break;
                case 13 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:694:7: createFunctionStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_createFunctionStatement_in_ddlStatement1559);
                    createFunctionStatement42=createFunctionStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, createFunctionStatement42.getTree());

                    }
                    break;
                case 14 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:695:7: createMacroStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_createMacroStatement_in_ddlStatement1567);
                    createMacroStatement43=createMacroStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, createMacroStatement43.getTree());

                    }
                    break;
                case 15 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:696:7: createIndexStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_createIndexStatement_in_ddlStatement1575);
                    createIndexStatement44=createIndexStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, createIndexStatement44.getTree());

                    }
                    break;
                case 16 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:697:7: dropIndexStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_dropIndexStatement_in_ddlStatement1583);
                    dropIndexStatement45=dropIndexStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, dropIndexStatement45.getTree());

                    }
                    break;
                case 17 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:698:7: dropFunctionStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_dropFunctionStatement_in_ddlStatement1591);
                    dropFunctionStatement46=dropFunctionStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, dropFunctionStatement46.getTree());

                    }
                    break;
                case 18 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:699:7: dropMacroStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_dropMacroStatement_in_ddlStatement1599);
                    dropMacroStatement47=dropMacroStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, dropMacroStatement47.getTree());

                    }
                    break;
                case 19 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:700:7: analyzeStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_analyzeStatement_in_ddlStatement1607);
                    analyzeStatement48=analyzeStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, analyzeStatement48.getTree());

                    }
                    break;
                case 20 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:701:7: lockStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_lockStatement_in_ddlStatement1615);
                    lockStatement49=lockStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, lockStatement49.getTree());

                    }
                    break;
                case 21 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:702:7: unlockStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_unlockStatement_in_ddlStatement1623);
                    unlockStatement50=unlockStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, unlockStatement50.getTree());

                    }
                    break;
                case 22 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:703:7: lockDatabase
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_lockDatabase_in_ddlStatement1631);
                    lockDatabase51=lockDatabase();

                    state._fsp--;

                    adaptor.addChild(root_0, lockDatabase51.getTree());

                    }
                    break;
                case 23 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:704:7: unlockDatabase
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_unlockDatabase_in_ddlStatement1639);
                    unlockDatabase52=unlockDatabase();

                    state._fsp--;

                    adaptor.addChild(root_0, unlockDatabase52.getTree());

                    }
                    break;
                case 24 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:705:7: createRoleStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_createRoleStatement_in_ddlStatement1647);
                    createRoleStatement53=createRoleStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, createRoleStatement53.getTree());

                    }
                    break;
                case 25 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:706:7: dropRoleStatement
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_dropRoleStatement_in_ddlStatement1655);
                    dropRoleStatement54=dropRoleStatement();

                    state._fsp--;

                    adaptor.addChild(root_0, dropRoleStatement54.getTree());

                    }
                    break;
                case 26 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:707:7: grantPrivileges
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_grantPrivileges_in_ddlStatement1663);
                    grantPrivileges55=grantPrivileges();

                    state._fsp--;

                    adaptor.addChild(root_0, grantPrivileges55.getTree());

                    }
                    break;
                case 27 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:708:7: revokePrivileges
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_revokePrivileges_in_ddlStatement1671);
                    revokePrivileges56=revokePrivileges();

                    state._fsp--;

                    adaptor.addChild(root_0, revokePrivileges56.getTree());

                    }
                    break;
                case 28 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:709:7: showGrants
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_showGrants_in_ddlStatement1679);
                    showGrants57=showGrants();

                    state._fsp--;

                    adaptor.addChild(root_0, showGrants57.getTree());

                    }
                    break;
                case 29 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:710:7: showRoleGrants
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_showRoleGrants_in_ddlStatement1687);
                    showRoleGrants58=showRoleGrants();

                    state._fsp--;

                    adaptor.addChild(root_0, showRoleGrants58.getTree());

                    }
                    break;
                case 30 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:711:7: showRolePrincipals
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_showRolePrincipals_in_ddlStatement1695);
                    showRolePrincipals59=showRolePrincipals();

                    state._fsp--;

                    adaptor.addChild(root_0, showRolePrincipals59.getTree());

                    }
                    break;
                case 31 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:712:7: showRoles
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_showRoles_in_ddlStatement1703);
                    showRoles60=showRoles();

                    state._fsp--;

                    adaptor.addChild(root_0, showRoles60.getTree());

                    }
                    break;
                case 32 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:713:7: grantRole
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_grantRole_in_ddlStatement1711);
                    grantRole61=grantRole();

                    state._fsp--;

                    adaptor.addChild(root_0, grantRole61.getTree());

                    }
                    break;
                case 33 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:714:7: revokeRole
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_revokeRole_in_ddlStatement1719);
                    revokeRole62=revokeRole();

                    state._fsp--;

                    adaptor.addChild(root_0, revokeRole62.getTree());

                    }
                    break;
                case 34 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:715:7: setRole
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_setRole_in_ddlStatement1727);
                    setRole63=setRole();

                    state._fsp--;

                    adaptor.addChild(root_0, setRole63.getTree());

                    }
                    break;
                case 35 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:716:7: showCurrentRole
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_showCurrentRole_in_ddlStatement1735);
                    showCurrentRole64=showCurrentRole();

                    state._fsp--;

                    adaptor.addChild(root_0, showCurrentRole64.getTree());

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "ddlStatement"


    public static class ifExists_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "ifExists"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:719:1: ifExists : KW_IF KW_EXISTS -> ^( TOK_IFEXISTS ) ;
    public final HiveParser.ifExists_return ifExists() throws RecognitionException {
        HiveParser.ifExists_return retval = new HiveParser.ifExists_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_IF65=null;
        Token KW_EXISTS66=null;

        CommonTree KW_IF65_tree=null;
        CommonTree KW_EXISTS66_tree=null;
        RewriteRuleTokenStream stream_KW_IF=new RewriteRuleTokenStream(adaptor,"token KW_IF");
        RewriteRuleTokenStream stream_KW_EXISTS=new RewriteRuleTokenStream(adaptor,"token KW_EXISTS");

         pushMsg("if exists clause", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:722:5: ( KW_IF KW_EXISTS -> ^( TOK_IFEXISTS ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:722:7: KW_IF KW_EXISTS
            {
            KW_IF65=(Token)match(input,KW_IF,FOLLOW_KW_IF_in_ifExists1762);  
            stream_KW_IF.add(KW_IF65);


            KW_EXISTS66=(Token)match(input,KW_EXISTS,FOLLOW_KW_EXISTS_in_ifExists1764);  
            stream_KW_EXISTS.add(KW_EXISTS66);


            // AST REWRITE
            // elements: 
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 723:5: -> ^( TOK_IFEXISTS )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:723:8: ^( TOK_IFEXISTS )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_IFEXISTS, "TOK_IFEXISTS")
                , root_1);

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "ifExists"


    public static class restrictOrCascade_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "restrictOrCascade"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:726:1: restrictOrCascade : ( KW_RESTRICT -> ^( TOK_RESTRICT ) | KW_CASCADE -> ^( TOK_CASCADE ) );
    public final HiveParser.restrictOrCascade_return restrictOrCascade() throws RecognitionException {
        HiveParser.restrictOrCascade_return retval = new HiveParser.restrictOrCascade_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_RESTRICT67=null;
        Token KW_CASCADE68=null;

        CommonTree KW_RESTRICT67_tree=null;
        CommonTree KW_CASCADE68_tree=null;
        RewriteRuleTokenStream stream_KW_CASCADE=new RewriteRuleTokenStream(adaptor,"token KW_CASCADE");
        RewriteRuleTokenStream stream_KW_RESTRICT=new RewriteRuleTokenStream(adaptor,"token KW_RESTRICT");

         pushMsg("restrict or cascade clause", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:729:5: ( KW_RESTRICT -> ^( TOK_RESTRICT ) | KW_CASCADE -> ^( TOK_CASCADE ) )
            int alt11=2;
            switch ( input.LA(1) ) {
            case KW_RESTRICT:
                {
                alt11=1;
                }
                break;
            case KW_CASCADE:
                {
                alt11=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 11, 0, input);

                throw nvae;

            }

            switch (alt11) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:729:7: KW_RESTRICT
                    {
                    KW_RESTRICT67=(Token)match(input,KW_RESTRICT,FOLLOW_KW_RESTRICT_in_restrictOrCascade1801);  
                    stream_KW_RESTRICT.add(KW_RESTRICT67);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 730:5: -> ^( TOK_RESTRICT )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:730:8: ^( TOK_RESTRICT )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_RESTRICT, "TOK_RESTRICT")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:731:7: KW_CASCADE
                    {
                    KW_CASCADE68=(Token)match(input,KW_CASCADE,FOLLOW_KW_CASCADE_in_restrictOrCascade1819);  
                    stream_KW_CASCADE.add(KW_CASCADE68);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 732:5: -> ^( TOK_CASCADE )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:732:8: ^( TOK_CASCADE )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_CASCADE, "TOK_CASCADE")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "restrictOrCascade"


    public static class ifNotExists_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "ifNotExists"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:735:1: ifNotExists : KW_IF KW_NOT KW_EXISTS -> ^( TOK_IFNOTEXISTS ) ;
    public final HiveParser.ifNotExists_return ifNotExists() throws RecognitionException {
        HiveParser.ifNotExists_return retval = new HiveParser.ifNotExists_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_IF69=null;
        Token KW_NOT70=null;
        Token KW_EXISTS71=null;

        CommonTree KW_IF69_tree=null;
        CommonTree KW_NOT70_tree=null;
        CommonTree KW_EXISTS71_tree=null;
        RewriteRuleTokenStream stream_KW_IF=new RewriteRuleTokenStream(adaptor,"token KW_IF");
        RewriteRuleTokenStream stream_KW_NOT=new RewriteRuleTokenStream(adaptor,"token KW_NOT");
        RewriteRuleTokenStream stream_KW_EXISTS=new RewriteRuleTokenStream(adaptor,"token KW_EXISTS");

         pushMsg("if not exists clause", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:738:5: ( KW_IF KW_NOT KW_EXISTS -> ^( TOK_IFNOTEXISTS ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:738:7: KW_IF KW_NOT KW_EXISTS
            {
            KW_IF69=(Token)match(input,KW_IF,FOLLOW_KW_IF_in_ifNotExists1856);  
            stream_KW_IF.add(KW_IF69);


            KW_NOT70=(Token)match(input,KW_NOT,FOLLOW_KW_NOT_in_ifNotExists1858);  
            stream_KW_NOT.add(KW_NOT70);


            KW_EXISTS71=(Token)match(input,KW_EXISTS,FOLLOW_KW_EXISTS_in_ifNotExists1860);  
            stream_KW_EXISTS.add(KW_EXISTS71);


            // AST REWRITE
            // elements: 
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 739:5: -> ^( TOK_IFNOTEXISTS )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:739:8: ^( TOK_IFNOTEXISTS )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_IFNOTEXISTS, "TOK_IFNOTEXISTS")
                , root_1);

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "ifNotExists"


    public static class storedAsDirs_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "storedAsDirs"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:742:1: storedAsDirs : KW_STORED KW_AS KW_DIRECTORIES -> ^( TOK_STOREDASDIRS ) ;
    public final HiveParser.storedAsDirs_return storedAsDirs() throws RecognitionException {
        HiveParser.storedAsDirs_return retval = new HiveParser.storedAsDirs_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_STORED72=null;
        Token KW_AS73=null;
        Token KW_DIRECTORIES74=null;

        CommonTree KW_STORED72_tree=null;
        CommonTree KW_AS73_tree=null;
        CommonTree KW_DIRECTORIES74_tree=null;
        RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
        RewriteRuleTokenStream stream_KW_STORED=new RewriteRuleTokenStream(adaptor,"token KW_STORED");
        RewriteRuleTokenStream stream_KW_DIRECTORIES=new RewriteRuleTokenStream(adaptor,"token KW_DIRECTORIES");

         pushMsg("stored as directories", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:745:5: ( KW_STORED KW_AS KW_DIRECTORIES -> ^( TOK_STOREDASDIRS ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:745:7: KW_STORED KW_AS KW_DIRECTORIES
            {
            KW_STORED72=(Token)match(input,KW_STORED,FOLLOW_KW_STORED_in_storedAsDirs1897);  
            stream_KW_STORED.add(KW_STORED72);


            KW_AS73=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_storedAsDirs1899);  
            stream_KW_AS.add(KW_AS73);


            KW_DIRECTORIES74=(Token)match(input,KW_DIRECTORIES,FOLLOW_KW_DIRECTORIES_in_storedAsDirs1901);  
            stream_KW_DIRECTORIES.add(KW_DIRECTORIES74);


            // AST REWRITE
            // elements: 
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 746:5: -> ^( TOK_STOREDASDIRS )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:746:8: ^( TOK_STOREDASDIRS )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_STOREDASDIRS, "TOK_STOREDASDIRS")
                , root_1);

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "storedAsDirs"


    public static class orReplace_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "orReplace"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:749:1: orReplace : KW_OR KW_REPLACE -> ^( TOK_ORREPLACE ) ;
    public final HiveParser.orReplace_return orReplace() throws RecognitionException {
        HiveParser.orReplace_return retval = new HiveParser.orReplace_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_OR75=null;
        Token KW_REPLACE76=null;

        CommonTree KW_OR75_tree=null;
        CommonTree KW_REPLACE76_tree=null;
        RewriteRuleTokenStream stream_KW_REPLACE=new RewriteRuleTokenStream(adaptor,"token KW_REPLACE");
        RewriteRuleTokenStream stream_KW_OR=new RewriteRuleTokenStream(adaptor,"token KW_OR");

         pushMsg("or replace clause", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:752:5: ( KW_OR KW_REPLACE -> ^( TOK_ORREPLACE ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:752:7: KW_OR KW_REPLACE
            {
            KW_OR75=(Token)match(input,KW_OR,FOLLOW_KW_OR_in_orReplace1938);  
            stream_KW_OR.add(KW_OR75);


            KW_REPLACE76=(Token)match(input,KW_REPLACE,FOLLOW_KW_REPLACE_in_orReplace1940);  
            stream_KW_REPLACE.add(KW_REPLACE76);


            // AST REWRITE
            // elements: 
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 753:5: -> ^( TOK_ORREPLACE )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:753:8: ^( TOK_ORREPLACE )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ORREPLACE, "TOK_ORREPLACE")
                , root_1);

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "orReplace"


    public static class ignoreProtection_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "ignoreProtection"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:756:1: ignoreProtection : KW_IGNORE KW_PROTECTION -> ^( TOK_IGNOREPROTECTION ) ;
    public final HiveParser.ignoreProtection_return ignoreProtection() throws RecognitionException {
        HiveParser.ignoreProtection_return retval = new HiveParser.ignoreProtection_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_IGNORE77=null;
        Token KW_PROTECTION78=null;

        CommonTree KW_IGNORE77_tree=null;
        CommonTree KW_PROTECTION78_tree=null;
        RewriteRuleTokenStream stream_KW_PROTECTION=new RewriteRuleTokenStream(adaptor,"token KW_PROTECTION");
        RewriteRuleTokenStream stream_KW_IGNORE=new RewriteRuleTokenStream(adaptor,"token KW_IGNORE");

         pushMsg("ignore protection clause", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:759:9: ( KW_IGNORE KW_PROTECTION -> ^( TOK_IGNOREPROTECTION ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:759:11: KW_IGNORE KW_PROTECTION
            {
            KW_IGNORE77=(Token)match(input,KW_IGNORE,FOLLOW_KW_IGNORE_in_ignoreProtection1981);  
            stream_KW_IGNORE.add(KW_IGNORE77);


            KW_PROTECTION78=(Token)match(input,KW_PROTECTION,FOLLOW_KW_PROTECTION_in_ignoreProtection1983);  
            stream_KW_PROTECTION.add(KW_PROTECTION78);


            // AST REWRITE
            // elements: 
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 760:9: -> ^( TOK_IGNOREPROTECTION )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:760:12: ^( TOK_IGNOREPROTECTION )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_IGNOREPROTECTION, "TOK_IGNOREPROTECTION")
                , root_1);

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "ignoreProtection"


    public static class createDatabaseStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "createDatabaseStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:763:1: createDatabaseStatement : KW_CREATE ( KW_DATABASE | KW_SCHEMA ) ( ifNotExists )? name= identifier ( databaseComment )? ( dbLocation )? ( KW_WITH KW_DBPROPERTIES dbprops= dbProperties )? -> ^( TOK_CREATEDATABASE $name ( ifNotExists )? ( dbLocation )? ( databaseComment )? ( $dbprops)? ) ;
    public final HiveParser.createDatabaseStatement_return createDatabaseStatement() throws RecognitionException {
        HiveParser.createDatabaseStatement_return retval = new HiveParser.createDatabaseStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_CREATE79=null;
        Token KW_DATABASE80=null;
        Token KW_SCHEMA81=null;
        Token KW_WITH85=null;
        Token KW_DBPROPERTIES86=null;
        HiveParser_IdentifiersParser.identifier_return name =null;

        HiveParser.dbProperties_return dbprops =null;

        HiveParser.ifNotExists_return ifNotExists82 =null;

        HiveParser.databaseComment_return databaseComment83 =null;

        HiveParser.dbLocation_return dbLocation84 =null;


        CommonTree KW_CREATE79_tree=null;
        CommonTree KW_DATABASE80_tree=null;
        CommonTree KW_SCHEMA81_tree=null;
        CommonTree KW_WITH85_tree=null;
        CommonTree KW_DBPROPERTIES86_tree=null;
        RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
        RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
        RewriteRuleTokenStream stream_KW_DBPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_DBPROPERTIES");
        RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
        RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
        RewriteRuleSubtreeStream stream_dbProperties=new RewriteRuleSubtreeStream(adaptor,"rule dbProperties");
        RewriteRuleSubtreeStream stream_dbLocation=new RewriteRuleSubtreeStream(adaptor,"rule dbLocation");
        RewriteRuleSubtreeStream stream_ifNotExists=new RewriteRuleSubtreeStream(adaptor,"rule ifNotExists");
        RewriteRuleSubtreeStream stream_databaseComment=new RewriteRuleSubtreeStream(adaptor,"rule databaseComment");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("create database statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:766:5: ( KW_CREATE ( KW_DATABASE | KW_SCHEMA ) ( ifNotExists )? name= identifier ( databaseComment )? ( dbLocation )? ( KW_WITH KW_DBPROPERTIES dbprops= dbProperties )? -> ^( TOK_CREATEDATABASE $name ( ifNotExists )? ( dbLocation )? ( databaseComment )? ( $dbprops)? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:766:7: KW_CREATE ( KW_DATABASE | KW_SCHEMA ) ( ifNotExists )? name= identifier ( databaseComment )? ( dbLocation )? ( KW_WITH KW_DBPROPERTIES dbprops= dbProperties )?
            {
            KW_CREATE79=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_createDatabaseStatement2028);  
            stream_KW_CREATE.add(KW_CREATE79);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:766:17: ( KW_DATABASE | KW_SCHEMA )
            int alt12=2;
            switch ( input.LA(1) ) {
            case KW_DATABASE:
                {
                alt12=1;
                }
                break;
            case KW_SCHEMA:
                {
                alt12=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 12, 0, input);

                throw nvae;

            }

            switch (alt12) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:766:18: KW_DATABASE
                    {
                    KW_DATABASE80=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_createDatabaseStatement2031);  
                    stream_KW_DATABASE.add(KW_DATABASE80);


                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:766:30: KW_SCHEMA
                    {
                    KW_SCHEMA81=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_createDatabaseStatement2033);  
                    stream_KW_SCHEMA.add(KW_SCHEMA81);


                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:767:9: ( ifNotExists )?
            int alt13=2;
            switch ( input.LA(1) ) {
                case KW_IF:
                    {
                    alt13=1;
                    }
                    break;
            }

            switch (alt13) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:767:9: ifNotExists
                    {
                    pushFollow(FOLLOW_ifNotExists_in_createDatabaseStatement2044);
                    ifNotExists82=ifNotExists();

                    state._fsp--;

                    stream_ifNotExists.add(ifNotExists82.getTree());

                    }
                    break;

            }


            pushFollow(FOLLOW_identifier_in_createDatabaseStatement2057);
            name=identifier();

            state._fsp--;

            stream_identifier.add(name.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:769:9: ( databaseComment )?
            int alt14=2;
            switch ( input.LA(1) ) {
                case KW_COMMENT:
                    {
                    alt14=1;
                    }
                    break;
            }

            switch (alt14) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:769:9: databaseComment
                    {
                    pushFollow(FOLLOW_databaseComment_in_createDatabaseStatement2067);
                    databaseComment83=databaseComment();

                    state._fsp--;

                    stream_databaseComment.add(databaseComment83.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:770:9: ( dbLocation )?
            int alt15=2;
            switch ( input.LA(1) ) {
                case KW_LOCATION:
                    {
                    alt15=1;
                    }
                    break;
            }

            switch (alt15) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:770:9: dbLocation
                    {
                    pushFollow(FOLLOW_dbLocation_in_createDatabaseStatement2078);
                    dbLocation84=dbLocation();

                    state._fsp--;

                    stream_dbLocation.add(dbLocation84.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:771:9: ( KW_WITH KW_DBPROPERTIES dbprops= dbProperties )?
            int alt16=2;
            switch ( input.LA(1) ) {
                case KW_WITH:
                    {
                    alt16=1;
                    }
                    break;
            }

            switch (alt16) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:771:10: KW_WITH KW_DBPROPERTIES dbprops= dbProperties
                    {
                    KW_WITH85=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_createDatabaseStatement2090);  
                    stream_KW_WITH.add(KW_WITH85);


                    KW_DBPROPERTIES86=(Token)match(input,KW_DBPROPERTIES,FOLLOW_KW_DBPROPERTIES_in_createDatabaseStatement2092);  
                    stream_KW_DBPROPERTIES.add(KW_DBPROPERTIES86);


                    pushFollow(FOLLOW_dbProperties_in_createDatabaseStatement2096);
                    dbprops=dbProperties();

                    state._fsp--;

                    stream_dbProperties.add(dbprops.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: dbLocation, name, dbprops, ifNotExists, databaseComment
            // token labels: 
            // rule labels: retval, name, dbprops
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_name=new RewriteRuleSubtreeStream(adaptor,"rule name",name!=null?name.tree:null);
            RewriteRuleSubtreeStream stream_dbprops=new RewriteRuleSubtreeStream(adaptor,"rule dbprops",dbprops!=null?dbprops.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 772:5: -> ^( TOK_CREATEDATABASE $name ( ifNotExists )? ( dbLocation )? ( databaseComment )? ( $dbprops)? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:772:8: ^( TOK_CREATEDATABASE $name ( ifNotExists )? ( dbLocation )? ( databaseComment )? ( $dbprops)? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_CREATEDATABASE, "TOK_CREATEDATABASE")
                , root_1);

                adaptor.addChild(root_1, stream_name.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:772:35: ( ifNotExists )?
                if ( stream_ifNotExists.hasNext() ) {
                    adaptor.addChild(root_1, stream_ifNotExists.nextTree());

                }
                stream_ifNotExists.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:772:48: ( dbLocation )?
                if ( stream_dbLocation.hasNext() ) {
                    adaptor.addChild(root_1, stream_dbLocation.nextTree());

                }
                stream_dbLocation.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:772:60: ( databaseComment )?
                if ( stream_databaseComment.hasNext() ) {
                    adaptor.addChild(root_1, stream_databaseComment.nextTree());

                }
                stream_databaseComment.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:772:78: ( $dbprops)?
                if ( stream_dbprops.hasNext() ) {
                    adaptor.addChild(root_1, stream_dbprops.nextTree());

                }
                stream_dbprops.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "createDatabaseStatement"


    public static class dbLocation_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "dbLocation"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:775:1: dbLocation : KW_LOCATION locn= StringLiteral -> ^( TOK_DATABASELOCATION $locn) ;
    public final HiveParser.dbLocation_return dbLocation() throws RecognitionException {
        HiveParser.dbLocation_return retval = new HiveParser.dbLocation_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token locn=null;
        Token KW_LOCATION87=null;

        CommonTree locn_tree=null;
        CommonTree KW_LOCATION87_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_LOCATION=new RewriteRuleTokenStream(adaptor,"token KW_LOCATION");

         pushMsg("database location specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:778:5: ( KW_LOCATION locn= StringLiteral -> ^( TOK_DATABASELOCATION $locn) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:779:7: KW_LOCATION locn= StringLiteral
            {
            KW_LOCATION87=(Token)match(input,KW_LOCATION,FOLLOW_KW_LOCATION_in_dbLocation2157);  
            stream_KW_LOCATION.add(KW_LOCATION87);


            locn=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_dbLocation2161);  
            stream_StringLiteral.add(locn);


            // AST REWRITE
            // elements: locn
            // token labels: locn
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_locn=new RewriteRuleTokenStream(adaptor,"token locn",locn);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 779:38: -> ^( TOK_DATABASELOCATION $locn)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:779:41: ^( TOK_DATABASELOCATION $locn)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DATABASELOCATION, "TOK_DATABASELOCATION")
                , root_1);

                adaptor.addChild(root_1, stream_locn.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "dbLocation"


    public static class dbProperties_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "dbProperties"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:782:1: dbProperties : LPAREN dbPropertiesList RPAREN -> ^( TOK_DATABASEPROPERTIES dbPropertiesList ) ;
    public final HiveParser.dbProperties_return dbProperties() throws RecognitionException {
        HiveParser.dbProperties_return retval = new HiveParser.dbProperties_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token LPAREN88=null;
        Token RPAREN90=null;
        HiveParser.dbPropertiesList_return dbPropertiesList89 =null;


        CommonTree LPAREN88_tree=null;
        CommonTree RPAREN90_tree=null;
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleSubtreeStream stream_dbPropertiesList=new RewriteRuleSubtreeStream(adaptor,"rule dbPropertiesList");
         pushMsg("dbproperties", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:785:5: ( LPAREN dbPropertiesList RPAREN -> ^( TOK_DATABASEPROPERTIES dbPropertiesList ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:786:7: LPAREN dbPropertiesList RPAREN
            {
            LPAREN88=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_dbProperties2203);  
            stream_LPAREN.add(LPAREN88);


            pushFollow(FOLLOW_dbPropertiesList_in_dbProperties2205);
            dbPropertiesList89=dbPropertiesList();

            state._fsp--;

            stream_dbPropertiesList.add(dbPropertiesList89.getTree());

            RPAREN90=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_dbProperties2207);  
            stream_RPAREN.add(RPAREN90);


            // AST REWRITE
            // elements: dbPropertiesList
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 786:38: -> ^( TOK_DATABASEPROPERTIES dbPropertiesList )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:786:41: ^( TOK_DATABASEPROPERTIES dbPropertiesList )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DATABASEPROPERTIES, "TOK_DATABASEPROPERTIES")
                , root_1);

                adaptor.addChild(root_1, stream_dbPropertiesList.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "dbProperties"


    public static class dbPropertiesList_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "dbPropertiesList"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:789:1: dbPropertiesList : keyValueProperty ( COMMA keyValueProperty )* -> ^( TOK_DBPROPLIST ( keyValueProperty )+ ) ;
    public final HiveParser.dbPropertiesList_return dbPropertiesList() throws RecognitionException {
        HiveParser.dbPropertiesList_return retval = new HiveParser.dbPropertiesList_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token COMMA92=null;
        HiveParser.keyValueProperty_return keyValueProperty91 =null;

        HiveParser.keyValueProperty_return keyValueProperty93 =null;


        CommonTree COMMA92_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleSubtreeStream stream_keyValueProperty=new RewriteRuleSubtreeStream(adaptor,"rule keyValueProperty");
         pushMsg("database properties list", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:792:5: ( keyValueProperty ( COMMA keyValueProperty )* -> ^( TOK_DBPROPLIST ( keyValueProperty )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:793:7: keyValueProperty ( COMMA keyValueProperty )*
            {
            pushFollow(FOLLOW_keyValueProperty_in_dbPropertiesList2248);
            keyValueProperty91=keyValueProperty();

            state._fsp--;

            stream_keyValueProperty.add(keyValueProperty91.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:793:24: ( COMMA keyValueProperty )*
            loop17:
            do {
                int alt17=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt17=1;
                    }
                    break;

                }

                switch (alt17) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:793:25: COMMA keyValueProperty
            	    {
            	    COMMA92=(Token)match(input,COMMA,FOLLOW_COMMA_in_dbPropertiesList2251);  
            	    stream_COMMA.add(COMMA92);


            	    pushFollow(FOLLOW_keyValueProperty_in_dbPropertiesList2253);
            	    keyValueProperty93=keyValueProperty();

            	    state._fsp--;

            	    stream_keyValueProperty.add(keyValueProperty93.getTree());

            	    }
            	    break;

            	default :
            	    break loop17;
                }
            } while (true);


            // AST REWRITE
            // elements: keyValueProperty
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 793:50: -> ^( TOK_DBPROPLIST ( keyValueProperty )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:793:53: ^( TOK_DBPROPLIST ( keyValueProperty )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DBPROPLIST, "TOK_DBPROPLIST")
                , root_1);

                if ( !(stream_keyValueProperty.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_keyValueProperty.hasNext() ) {
                    adaptor.addChild(root_1, stream_keyValueProperty.nextTree());

                }
                stream_keyValueProperty.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "dbPropertiesList"


    public static class switchDatabaseStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "switchDatabaseStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:797:1: switchDatabaseStatement : KW_USE identifier -> ^( TOK_SWITCHDATABASE identifier ) ;
    public final HiveParser.switchDatabaseStatement_return switchDatabaseStatement() throws RecognitionException {
        HiveParser.switchDatabaseStatement_return retval = new HiveParser.switchDatabaseStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_USE94=null;
        HiveParser_IdentifiersParser.identifier_return identifier95 =null;


        CommonTree KW_USE94_tree=null;
        RewriteRuleTokenStream stream_KW_USE=new RewriteRuleTokenStream(adaptor,"token KW_USE");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("switch database statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:800:5: ( KW_USE identifier -> ^( TOK_SWITCHDATABASE identifier ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:800:7: KW_USE identifier
            {
            KW_USE94=(Token)match(input,KW_USE,FOLLOW_KW_USE_in_switchDatabaseStatement2292);  
            stream_KW_USE.add(KW_USE94);


            pushFollow(FOLLOW_identifier_in_switchDatabaseStatement2294);
            identifier95=identifier();

            state._fsp--;

            stream_identifier.add(identifier95.getTree());

            // AST REWRITE
            // elements: identifier
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 801:5: -> ^( TOK_SWITCHDATABASE identifier )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:801:8: ^( TOK_SWITCHDATABASE identifier )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SWITCHDATABASE, "TOK_SWITCHDATABASE")
                , root_1);

                adaptor.addChild(root_1, stream_identifier.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "switchDatabaseStatement"


    public static class dropDatabaseStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "dropDatabaseStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:804:1: dropDatabaseStatement : KW_DROP ( KW_DATABASE | KW_SCHEMA ) ( ifExists )? identifier ( restrictOrCascade )? -> ^( TOK_DROPDATABASE identifier ( ifExists )? ( restrictOrCascade )? ) ;
    public final HiveParser.dropDatabaseStatement_return dropDatabaseStatement() throws RecognitionException {
        HiveParser.dropDatabaseStatement_return retval = new HiveParser.dropDatabaseStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_DROP96=null;
        Token KW_DATABASE97=null;
        Token KW_SCHEMA98=null;
        HiveParser.ifExists_return ifExists99 =null;

        HiveParser_IdentifiersParser.identifier_return identifier100 =null;

        HiveParser.restrictOrCascade_return restrictOrCascade101 =null;


        CommonTree KW_DROP96_tree=null;
        CommonTree KW_DATABASE97_tree=null;
        CommonTree KW_SCHEMA98_tree=null;
        RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
        RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
        RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
        RewriteRuleSubtreeStream stream_restrictOrCascade=new RewriteRuleSubtreeStream(adaptor,"rule restrictOrCascade");
        RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("drop database statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:807:5: ( KW_DROP ( KW_DATABASE | KW_SCHEMA ) ( ifExists )? identifier ( restrictOrCascade )? -> ^( TOK_DROPDATABASE identifier ( ifExists )? ( restrictOrCascade )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:807:7: KW_DROP ( KW_DATABASE | KW_SCHEMA ) ( ifExists )? identifier ( restrictOrCascade )?
            {
            KW_DROP96=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_dropDatabaseStatement2333);  
            stream_KW_DROP.add(KW_DROP96);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:807:15: ( KW_DATABASE | KW_SCHEMA )
            int alt18=2;
            switch ( input.LA(1) ) {
            case KW_DATABASE:
                {
                alt18=1;
                }
                break;
            case KW_SCHEMA:
                {
                alt18=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 18, 0, input);

                throw nvae;

            }

            switch (alt18) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:807:16: KW_DATABASE
                    {
                    KW_DATABASE97=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_dropDatabaseStatement2336);  
                    stream_KW_DATABASE.add(KW_DATABASE97);


                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:807:28: KW_SCHEMA
                    {
                    KW_SCHEMA98=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_dropDatabaseStatement2338);  
                    stream_KW_SCHEMA.add(KW_SCHEMA98);


                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:807:39: ( ifExists )?
            int alt19=2;
            switch ( input.LA(1) ) {
                case KW_IF:
                    {
                    alt19=1;
                    }
                    break;
            }

            switch (alt19) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:807:39: ifExists
                    {
                    pushFollow(FOLLOW_ifExists_in_dropDatabaseStatement2341);
                    ifExists99=ifExists();

                    state._fsp--;

                    stream_ifExists.add(ifExists99.getTree());

                    }
                    break;

            }


            pushFollow(FOLLOW_identifier_in_dropDatabaseStatement2344);
            identifier100=identifier();

            state._fsp--;

            stream_identifier.add(identifier100.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:807:60: ( restrictOrCascade )?
            int alt20=2;
            switch ( input.LA(1) ) {
                case KW_CASCADE:
                case KW_RESTRICT:
                    {
                    alt20=1;
                    }
                    break;
            }

            switch (alt20) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:807:60: restrictOrCascade
                    {
                    pushFollow(FOLLOW_restrictOrCascade_in_dropDatabaseStatement2346);
                    restrictOrCascade101=restrictOrCascade();

                    state._fsp--;

                    stream_restrictOrCascade.add(restrictOrCascade101.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: restrictOrCascade, ifExists, identifier
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 808:5: -> ^( TOK_DROPDATABASE identifier ( ifExists )? ( restrictOrCascade )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:808:8: ^( TOK_DROPDATABASE identifier ( ifExists )? ( restrictOrCascade )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DROPDATABASE, "TOK_DROPDATABASE")
                , root_1);

                adaptor.addChild(root_1, stream_identifier.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:808:38: ( ifExists )?
                if ( stream_ifExists.hasNext() ) {
                    adaptor.addChild(root_1, stream_ifExists.nextTree());

                }
                stream_ifExists.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:808:48: ( restrictOrCascade )?
                if ( stream_restrictOrCascade.hasNext() ) {
                    adaptor.addChild(root_1, stream_restrictOrCascade.nextTree());

                }
                stream_restrictOrCascade.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "dropDatabaseStatement"


    public static class databaseComment_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "databaseComment"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:811:1: databaseComment : KW_COMMENT comment= StringLiteral -> ^( TOK_DATABASECOMMENT $comment) ;
    public final HiveParser.databaseComment_return databaseComment() throws RecognitionException {
        HiveParser.databaseComment_return retval = new HiveParser.databaseComment_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token comment=null;
        Token KW_COMMENT102=null;

        CommonTree comment_tree=null;
        CommonTree KW_COMMENT102_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");

         pushMsg("database's comment", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:814:5: ( KW_COMMENT comment= StringLiteral -> ^( TOK_DATABASECOMMENT $comment) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:814:7: KW_COMMENT comment= StringLiteral
            {
            KW_COMMENT102=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_databaseComment2392);  
            stream_KW_COMMENT.add(KW_COMMENT102);


            comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_databaseComment2396);  
            stream_StringLiteral.add(comment);


            // AST REWRITE
            // elements: comment
            // token labels: comment
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 815:5: -> ^( TOK_DATABASECOMMENT $comment)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:815:8: ^( TOK_DATABASECOMMENT $comment)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DATABASECOMMENT, "TOK_DATABASECOMMENT")
                , root_1);

                adaptor.addChild(root_1, stream_comment.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "databaseComment"


    public static class createTableStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "createTableStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:818:1: createTableStatement : KW_CREATE (temp= KW_TEMPORARY )? (ext= KW_EXTERNAL )? KW_TABLE ( ifNotExists )? name= tableName (like= KW_LIKE likeName= tableName ( tableLocation )? ( tablePropertiesPrefixed )? | ( LPAREN columnNameTypeList RPAREN )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( KW_AS selectStatementWithCTE )? ) -> ^( TOK_CREATETABLE $name ( $temp)? ( $ext)? ( ifNotExists )? ^( TOK_LIKETABLE ( $likeName)? ) ( columnNameTypeList )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( selectStatementWithCTE )? ) ;
    public final HiveParser.createTableStatement_return createTableStatement() throws RecognitionException {
        HiveParser.createTableStatement_return retval = new HiveParser.createTableStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token temp=null;
        Token ext=null;
        Token like=null;
        Token KW_CREATE103=null;
        Token KW_TABLE104=null;
        Token LPAREN108=null;
        Token RPAREN110=null;
        Token KW_AS119=null;
        HiveParser_FromClauseParser.tableName_return name =null;

        HiveParser_FromClauseParser.tableName_return likeName =null;

        HiveParser.ifNotExists_return ifNotExists105 =null;

        HiveParser.tableLocation_return tableLocation106 =null;

        HiveParser.tablePropertiesPrefixed_return tablePropertiesPrefixed107 =null;

        HiveParser.columnNameTypeList_return columnNameTypeList109 =null;

        HiveParser.tableComment_return tableComment111 =null;

        HiveParser.tablePartition_return tablePartition112 =null;

        HiveParser.tableBuckets_return tableBuckets113 =null;

        HiveParser.tableSkewed_return tableSkewed114 =null;

        HiveParser.tableRowFormat_return tableRowFormat115 =null;

        HiveParser.tableFileFormat_return tableFileFormat116 =null;

        HiveParser.tableLocation_return tableLocation117 =null;

        HiveParser.tablePropertiesPrefixed_return tablePropertiesPrefixed118 =null;

        HiveParser.selectStatementWithCTE_return selectStatementWithCTE120 =null;


        CommonTree temp_tree=null;
        CommonTree ext_tree=null;
        CommonTree like_tree=null;
        CommonTree KW_CREATE103_tree=null;
        CommonTree KW_TABLE104_tree=null;
        CommonTree LPAREN108_tree=null;
        CommonTree RPAREN110_tree=null;
        CommonTree KW_AS119_tree=null;
        RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
        RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_KW_LIKE=new RewriteRuleTokenStream(adaptor,"token KW_LIKE");
        RewriteRuleTokenStream stream_KW_EXTERNAL=new RewriteRuleTokenStream(adaptor,"token KW_EXTERNAL");
        RewriteRuleTokenStream stream_KW_TEMPORARY=new RewriteRuleTokenStream(adaptor,"token KW_TEMPORARY");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleSubtreeStream stream_selectStatementWithCTE=new RewriteRuleSubtreeStream(adaptor,"rule selectStatementWithCTE");
        RewriteRuleSubtreeStream stream_columnNameTypeList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameTypeList");
        RewriteRuleSubtreeStream stream_tableBuckets=new RewriteRuleSubtreeStream(adaptor,"rule tableBuckets");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
        RewriteRuleSubtreeStream stream_tablePartition=new RewriteRuleSubtreeStream(adaptor,"rule tablePartition");
        RewriteRuleSubtreeStream stream_tablePropertiesPrefixed=new RewriteRuleSubtreeStream(adaptor,"rule tablePropertiesPrefixed");
        RewriteRuleSubtreeStream stream_tableComment=new RewriteRuleSubtreeStream(adaptor,"rule tableComment");
        RewriteRuleSubtreeStream stream_tableRowFormat=new RewriteRuleSubtreeStream(adaptor,"rule tableRowFormat");
        RewriteRuleSubtreeStream stream_tableFileFormat=new RewriteRuleSubtreeStream(adaptor,"rule tableFileFormat");
        RewriteRuleSubtreeStream stream_tableLocation=new RewriteRuleSubtreeStream(adaptor,"rule tableLocation");
        RewriteRuleSubtreeStream stream_ifNotExists=new RewriteRuleSubtreeStream(adaptor,"rule ifNotExists");
        RewriteRuleSubtreeStream stream_tableSkewed=new RewriteRuleSubtreeStream(adaptor,"rule tableSkewed");
         pushMsg("create table statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:821:5: ( KW_CREATE (temp= KW_TEMPORARY )? (ext= KW_EXTERNAL )? KW_TABLE ( ifNotExists )? name= tableName (like= KW_LIKE likeName= tableName ( tableLocation )? ( tablePropertiesPrefixed )? | ( LPAREN columnNameTypeList RPAREN )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( KW_AS selectStatementWithCTE )? ) -> ^( TOK_CREATETABLE $name ( $temp)? ( $ext)? ( ifNotExists )? ^( TOK_LIKETABLE ( $likeName)? ) ( columnNameTypeList )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( selectStatementWithCTE )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:821:7: KW_CREATE (temp= KW_TEMPORARY )? (ext= KW_EXTERNAL )? KW_TABLE ( ifNotExists )? name= tableName (like= KW_LIKE likeName= tableName ( tableLocation )? ( tablePropertiesPrefixed )? | ( LPAREN columnNameTypeList RPAREN )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( KW_AS selectStatementWithCTE )? )
            {
            KW_CREATE103=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_createTableStatement2436);  
            stream_KW_CREATE.add(KW_CREATE103);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:821:17: (temp= KW_TEMPORARY )?
            int alt21=2;
            switch ( input.LA(1) ) {
                case KW_TEMPORARY:
                    {
                    alt21=1;
                    }
                    break;
            }

            switch (alt21) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:821:18: temp= KW_TEMPORARY
                    {
                    temp=(Token)match(input,KW_TEMPORARY,FOLLOW_KW_TEMPORARY_in_createTableStatement2441);  
                    stream_KW_TEMPORARY.add(temp);


                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:821:38: (ext= KW_EXTERNAL )?
            int alt22=2;
            switch ( input.LA(1) ) {
                case KW_EXTERNAL:
                    {
                    alt22=1;
                    }
                    break;
            }

            switch (alt22) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:821:39: ext= KW_EXTERNAL
                    {
                    ext=(Token)match(input,KW_EXTERNAL,FOLLOW_KW_EXTERNAL_in_createTableStatement2448);  
                    stream_KW_EXTERNAL.add(ext);


                    }
                    break;

            }


            KW_TABLE104=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_createTableStatement2452);  
            stream_KW_TABLE.add(KW_TABLE104);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:821:66: ( ifNotExists )?
            int alt23=2;
            switch ( input.LA(1) ) {
                case KW_IF:
                    {
                    alt23=1;
                    }
                    break;
            }

            switch (alt23) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:821:66: ifNotExists
                    {
                    pushFollow(FOLLOW_ifNotExists_in_createTableStatement2454);
                    ifNotExists105=ifNotExists();

                    state._fsp--;

                    stream_ifNotExists.add(ifNotExists105.getTree());

                    }
                    break;

            }


            pushFollow(FOLLOW_tableName_in_createTableStatement2459);
            name=tableName();

            state._fsp--;

            stream_tableName.add(name.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:822:7: (like= KW_LIKE likeName= tableName ( tableLocation )? ( tablePropertiesPrefixed )? | ( LPAREN columnNameTypeList RPAREN )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( KW_AS selectStatementWithCTE )? )
            int alt36=2;
            switch ( input.LA(1) ) {
            case KW_LIKE:
                {
                alt36=1;
                }
                break;
            case EOF:
            case KW_AS:
            case KW_CLUSTERED:
            case KW_COMMENT:
            case KW_LOCATION:
            case KW_PARTITIONED:
            case KW_ROW:
            case KW_SKEWED:
            case KW_STORED:
            case KW_TBLPROPERTIES:
            case LPAREN:
                {
                alt36=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 36, 0, input);

                throw nvae;

            }

            switch (alt36) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:822:10: like= KW_LIKE likeName= tableName ( tableLocation )? ( tablePropertiesPrefixed )?
                    {
                    like=(Token)match(input,KW_LIKE,FOLLOW_KW_LIKE_in_createTableStatement2472);  
                    stream_KW_LIKE.add(like);


                    pushFollow(FOLLOW_tableName_in_createTableStatement2476);
                    likeName=tableName();

                    state._fsp--;

                    stream_tableName.add(likeName.getTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:823:10: ( tableLocation )?
                    int alt24=2;
                    switch ( input.LA(1) ) {
                        case KW_LOCATION:
                            {
                            alt24=1;
                            }
                            break;
                    }

                    switch (alt24) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:823:10: tableLocation
                            {
                            pushFollow(FOLLOW_tableLocation_in_createTableStatement2487);
                            tableLocation106=tableLocation();

                            state._fsp--;

                            stream_tableLocation.add(tableLocation106.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:824:10: ( tablePropertiesPrefixed )?
                    int alt25=2;
                    switch ( input.LA(1) ) {
                        case KW_TBLPROPERTIES:
                            {
                            alt25=1;
                            }
                            break;
                    }

                    switch (alt25) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:824:10: tablePropertiesPrefixed
                            {
                            pushFollow(FOLLOW_tablePropertiesPrefixed_in_createTableStatement2499);
                            tablePropertiesPrefixed107=tablePropertiesPrefixed();

                            state._fsp--;

                            stream_tablePropertiesPrefixed.add(tablePropertiesPrefixed107.getTree());

                            }
                            break;

                    }


                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:825:10: ( LPAREN columnNameTypeList RPAREN )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( KW_AS selectStatementWithCTE )?
                    {
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:825:10: ( LPAREN columnNameTypeList RPAREN )?
                    int alt26=2;
                    switch ( input.LA(1) ) {
                        case LPAREN:
                            {
                            alt26=1;
                            }
                            break;
                    }

                    switch (alt26) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:825:11: LPAREN columnNameTypeList RPAREN
                            {
                            LPAREN108=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_createTableStatement2512);  
                            stream_LPAREN.add(LPAREN108);


                            pushFollow(FOLLOW_columnNameTypeList_in_createTableStatement2514);
                            columnNameTypeList109=columnNameTypeList();

                            state._fsp--;

                            stream_columnNameTypeList.add(columnNameTypeList109.getTree());

                            RPAREN110=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_createTableStatement2516);  
                            stream_RPAREN.add(RPAREN110);


                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:826:10: ( tableComment )?
                    int alt27=2;
                    switch ( input.LA(1) ) {
                        case KW_COMMENT:
                            {
                            alt27=1;
                            }
                            break;
                    }

                    switch (alt27) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:826:10: tableComment
                            {
                            pushFollow(FOLLOW_tableComment_in_createTableStatement2529);
                            tableComment111=tableComment();

                            state._fsp--;

                            stream_tableComment.add(tableComment111.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:827:10: ( tablePartition )?
                    int alt28=2;
                    switch ( input.LA(1) ) {
                        case KW_PARTITIONED:
                            {
                            alt28=1;
                            }
                            break;
                    }

                    switch (alt28) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:827:10: tablePartition
                            {
                            pushFollow(FOLLOW_tablePartition_in_createTableStatement2541);
                            tablePartition112=tablePartition();

                            state._fsp--;

                            stream_tablePartition.add(tablePartition112.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:828:10: ( tableBuckets )?
                    int alt29=2;
                    switch ( input.LA(1) ) {
                        case KW_CLUSTERED:
                            {
                            alt29=1;
                            }
                            break;
                    }

                    switch (alt29) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:828:10: tableBuckets
                            {
                            pushFollow(FOLLOW_tableBuckets_in_createTableStatement2553);
                            tableBuckets113=tableBuckets();

                            state._fsp--;

                            stream_tableBuckets.add(tableBuckets113.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:829:10: ( tableSkewed )?
                    int alt30=2;
                    switch ( input.LA(1) ) {
                        case KW_SKEWED:
                            {
                            alt30=1;
                            }
                            break;
                    }

                    switch (alt30) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:829:10: tableSkewed
                            {
                            pushFollow(FOLLOW_tableSkewed_in_createTableStatement2565);
                            tableSkewed114=tableSkewed();

                            state._fsp--;

                            stream_tableSkewed.add(tableSkewed114.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:830:10: ( tableRowFormat )?
                    int alt31=2;
                    switch ( input.LA(1) ) {
                        case KW_ROW:
                            {
                            alt31=1;
                            }
                            break;
                    }

                    switch (alt31) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:830:10: tableRowFormat
                            {
                            pushFollow(FOLLOW_tableRowFormat_in_createTableStatement2577);
                            tableRowFormat115=tableRowFormat();

                            state._fsp--;

                            stream_tableRowFormat.add(tableRowFormat115.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:831:10: ( tableFileFormat )?
                    int alt32=2;
                    switch ( input.LA(1) ) {
                        case KW_STORED:
                            {
                            alt32=1;
                            }
                            break;
                    }

                    switch (alt32) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:831:10: tableFileFormat
                            {
                            pushFollow(FOLLOW_tableFileFormat_in_createTableStatement2589);
                            tableFileFormat116=tableFileFormat();

                            state._fsp--;

                            stream_tableFileFormat.add(tableFileFormat116.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:832:10: ( tableLocation )?
                    int alt33=2;
                    switch ( input.LA(1) ) {
                        case KW_LOCATION:
                            {
                            alt33=1;
                            }
                            break;
                    }

                    switch (alt33) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:832:10: tableLocation
                            {
                            pushFollow(FOLLOW_tableLocation_in_createTableStatement2601);
                            tableLocation117=tableLocation();

                            state._fsp--;

                            stream_tableLocation.add(tableLocation117.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:833:10: ( tablePropertiesPrefixed )?
                    int alt34=2;
                    switch ( input.LA(1) ) {
                        case KW_TBLPROPERTIES:
                            {
                            alt34=1;
                            }
                            break;
                    }

                    switch (alt34) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:833:10: tablePropertiesPrefixed
                            {
                            pushFollow(FOLLOW_tablePropertiesPrefixed_in_createTableStatement2613);
                            tablePropertiesPrefixed118=tablePropertiesPrefixed();

                            state._fsp--;

                            stream_tablePropertiesPrefixed.add(tablePropertiesPrefixed118.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:834:10: ( KW_AS selectStatementWithCTE )?
                    int alt35=2;
                    switch ( input.LA(1) ) {
                        case KW_AS:
                            {
                            alt35=1;
                            }
                            break;
                    }

                    switch (alt35) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:834:11: KW_AS selectStatementWithCTE
                            {
                            KW_AS119=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_createTableStatement2626);  
                            stream_KW_AS.add(KW_AS119);


                            pushFollow(FOLLOW_selectStatementWithCTE_in_createTableStatement2628);
                            selectStatementWithCTE120=selectStatementWithCTE();

                            state._fsp--;

                            stream_selectStatementWithCTE.add(selectStatementWithCTE120.getTree());

                            }
                            break;

                    }


                    }
                    break;

            }


            // AST REWRITE
            // elements: tableFileFormat, ext, likeName, tableBuckets, tableComment, tableRowFormat, tableLocation, selectStatementWithCTE, tablePropertiesPrefixed, columnNameTypeList, temp, ifNotExists, name, tablePartition, tableSkewed
            // token labels: temp, ext
            // rule labels: retval, name, likeName
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_temp=new RewriteRuleTokenStream(adaptor,"token temp",temp);
            RewriteRuleTokenStream stream_ext=new RewriteRuleTokenStream(adaptor,"token ext",ext);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_name=new RewriteRuleSubtreeStream(adaptor,"rule name",name!=null?name.tree:null);
            RewriteRuleSubtreeStream stream_likeName=new RewriteRuleSubtreeStream(adaptor,"rule likeName",likeName!=null?likeName.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 836:5: -> ^( TOK_CREATETABLE $name ( $temp)? ( $ext)? ( ifNotExists )? ^( TOK_LIKETABLE ( $likeName)? ) ( columnNameTypeList )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( selectStatementWithCTE )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:836:8: ^( TOK_CREATETABLE $name ( $temp)? ( $ext)? ( ifNotExists )? ^( TOK_LIKETABLE ( $likeName)? ) ( columnNameTypeList )? ( tableComment )? ( tablePartition )? ( tableBuckets )? ( tableSkewed )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( selectStatementWithCTE )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_CREATETABLE, "TOK_CREATETABLE")
                , root_1);

                adaptor.addChild(root_1, stream_name.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:836:33: ( $temp)?
                if ( stream_temp.hasNext() ) {
                    adaptor.addChild(root_1, stream_temp.nextNode());

                }
                stream_temp.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:836:40: ( $ext)?
                if ( stream_ext.hasNext() ) {
                    adaptor.addChild(root_1, stream_ext.nextNode());

                }
                stream_ext.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:836:45: ( ifNotExists )?
                if ( stream_ifNotExists.hasNext() ) {
                    adaptor.addChild(root_1, stream_ifNotExists.nextTree());

                }
                stream_ifNotExists.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:837:10: ^( TOK_LIKETABLE ( $likeName)? )
                {
                CommonTree root_2 = (CommonTree)adaptor.nil();
                root_2 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_LIKETABLE, "TOK_LIKETABLE")
                , root_2);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:837:27: ( $likeName)?
                if ( stream_likeName.hasNext() ) {
                    adaptor.addChild(root_2, stream_likeName.nextTree());

                }
                stream_likeName.reset();

                adaptor.addChild(root_1, root_2);
                }

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:838:10: ( columnNameTypeList )?
                if ( stream_columnNameTypeList.hasNext() ) {
                    adaptor.addChild(root_1, stream_columnNameTypeList.nextTree());

                }
                stream_columnNameTypeList.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:839:10: ( tableComment )?
                if ( stream_tableComment.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableComment.nextTree());

                }
                stream_tableComment.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:840:10: ( tablePartition )?
                if ( stream_tablePartition.hasNext() ) {
                    adaptor.addChild(root_1, stream_tablePartition.nextTree());

                }
                stream_tablePartition.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:841:10: ( tableBuckets )?
                if ( stream_tableBuckets.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableBuckets.nextTree());

                }
                stream_tableBuckets.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:842:10: ( tableSkewed )?
                if ( stream_tableSkewed.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableSkewed.nextTree());

                }
                stream_tableSkewed.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:843:10: ( tableRowFormat )?
                if ( stream_tableRowFormat.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableRowFormat.nextTree());

                }
                stream_tableRowFormat.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:844:10: ( tableFileFormat )?
                if ( stream_tableFileFormat.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableFileFormat.nextTree());

                }
                stream_tableFileFormat.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:845:10: ( tableLocation )?
                if ( stream_tableLocation.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableLocation.nextTree());

                }
                stream_tableLocation.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:846:10: ( tablePropertiesPrefixed )?
                if ( stream_tablePropertiesPrefixed.hasNext() ) {
                    adaptor.addChild(root_1, stream_tablePropertiesPrefixed.nextTree());

                }
                stream_tablePropertiesPrefixed.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:847:10: ( selectStatementWithCTE )?
                if ( stream_selectStatementWithCTE.hasNext() ) {
                    adaptor.addChild(root_1, stream_selectStatementWithCTE.nextTree());

                }
                stream_selectStatementWithCTE.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "createTableStatement"


    public static class truncateTableStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "truncateTableStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:851:1: truncateTableStatement : KW_TRUNCATE KW_TABLE tablePartitionPrefix ( KW_COLUMNS LPAREN columnNameList RPAREN )? -> ^( TOK_TRUNCATETABLE tablePartitionPrefix ( columnNameList )? ) ;
    public final HiveParser.truncateTableStatement_return truncateTableStatement() throws RecognitionException {
        HiveParser.truncateTableStatement_return retval = new HiveParser.truncateTableStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_TRUNCATE121=null;
        Token KW_TABLE122=null;
        Token KW_COLUMNS124=null;
        Token LPAREN125=null;
        Token RPAREN127=null;
        HiveParser.tablePartitionPrefix_return tablePartitionPrefix123 =null;

        HiveParser.columnNameList_return columnNameList126 =null;


        CommonTree KW_TRUNCATE121_tree=null;
        CommonTree KW_TABLE122_tree=null;
        CommonTree KW_COLUMNS124_tree=null;
        CommonTree LPAREN125_tree=null;
        CommonTree RPAREN127_tree=null;
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_KW_COLUMNS=new RewriteRuleTokenStream(adaptor,"token KW_COLUMNS");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleTokenStream stream_KW_TRUNCATE=new RewriteRuleTokenStream(adaptor,"token KW_TRUNCATE");
        RewriteRuleSubtreeStream stream_columnNameList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameList");
        RewriteRuleSubtreeStream stream_tablePartitionPrefix=new RewriteRuleSubtreeStream(adaptor,"rule tablePartitionPrefix");
         pushMsg("truncate table statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:854:5: ( KW_TRUNCATE KW_TABLE tablePartitionPrefix ( KW_COLUMNS LPAREN columnNameList RPAREN )? -> ^( TOK_TRUNCATETABLE tablePartitionPrefix ( columnNameList )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:854:7: KW_TRUNCATE KW_TABLE tablePartitionPrefix ( KW_COLUMNS LPAREN columnNameList RPAREN )?
            {
            KW_TRUNCATE121=(Token)match(input,KW_TRUNCATE,FOLLOW_KW_TRUNCATE_in_truncateTableStatement2835);  
            stream_KW_TRUNCATE.add(KW_TRUNCATE121);


            KW_TABLE122=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_truncateTableStatement2837);  
            stream_KW_TABLE.add(KW_TABLE122);


            pushFollow(FOLLOW_tablePartitionPrefix_in_truncateTableStatement2839);
            tablePartitionPrefix123=tablePartitionPrefix();

            state._fsp--;

            stream_tablePartitionPrefix.add(tablePartitionPrefix123.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:854:49: ( KW_COLUMNS LPAREN columnNameList RPAREN )?
            int alt37=2;
            switch ( input.LA(1) ) {
                case KW_COLUMNS:
                    {
                    alt37=1;
                    }
                    break;
            }

            switch (alt37) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:854:50: KW_COLUMNS LPAREN columnNameList RPAREN
                    {
                    KW_COLUMNS124=(Token)match(input,KW_COLUMNS,FOLLOW_KW_COLUMNS_in_truncateTableStatement2842);  
                    stream_KW_COLUMNS.add(KW_COLUMNS124);


                    LPAREN125=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_truncateTableStatement2844);  
                    stream_LPAREN.add(LPAREN125);


                    pushFollow(FOLLOW_columnNameList_in_truncateTableStatement2846);
                    columnNameList126=columnNameList();

                    state._fsp--;

                    stream_columnNameList.add(columnNameList126.getTree());

                    RPAREN127=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_truncateTableStatement2848);  
                    stream_RPAREN.add(RPAREN127);


                    }
                    break;

            }


            // AST REWRITE
            // elements: tablePartitionPrefix, columnNameList
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 854:92: -> ^( TOK_TRUNCATETABLE tablePartitionPrefix ( columnNameList )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:854:95: ^( TOK_TRUNCATETABLE tablePartitionPrefix ( columnNameList )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TRUNCATETABLE, "TOK_TRUNCATETABLE")
                , root_1);

                adaptor.addChild(root_1, stream_tablePartitionPrefix.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:854:136: ( columnNameList )?
                if ( stream_columnNameList.hasNext() ) {
                    adaptor.addChild(root_1, stream_columnNameList.nextTree());

                }
                stream_columnNameList.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "truncateTableStatement"


    public static class createIndexStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "createIndexStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:856:1: createIndexStatement : KW_CREATE KW_INDEX indexName= identifier KW_ON KW_TABLE tab= tableName LPAREN indexedCols= columnNameList RPAREN KW_AS typeName= StringLiteral ( autoRebuild )? ( indexPropertiesPrefixed )? ( indexTblName )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( indexComment )? -> ^( TOK_CREATEINDEX $indexName $typeName $tab $indexedCols ( autoRebuild )? ( indexPropertiesPrefixed )? ( indexTblName )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( indexComment )? ) ;
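    // Editor's note (not part of the ANTLR output): per the rule above, createIndexStatement
    // matches CREATE INDEX <name> ON TABLE <table> ( <columns> ) AS <StringLiteral>
    // followed by optional clauses (the lookahead below predicts them on KW_WITH,
    // KW_IDXPROPERTIES, KW_IN, KW_ROW, KW_STORED, KW_LOCATION, KW_TBLPROPERTIES and
    // KW_COMMENT). A typical input would be something like
    //   CREATE INDEX idx ON TABLE t (c1) AS 'COMPACT' WITH DEFERRED REBUILD
    // and the rewrite produces ^( TOK_CREATEINDEX $indexName $typeName $tab $indexedCols ... ).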
    public final HiveParser.createIndexStatement_return createIndexStatement() throws RecognitionException {
        HiveParser.createIndexStatement_return retval = new HiveParser.createIndexStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token typeName=null;
        Token KW_CREATE128=null;
        Token KW_INDEX129=null;
        Token KW_ON130=null;
        Token KW_TABLE131=null;
        Token LPAREN132=null;
        Token RPAREN133=null;
        Token KW_AS134=null;
        HiveParser_IdentifiersParser.identifier_return indexName =null;

        HiveParser_FromClauseParser.tableName_return tab =null;

        HiveParser.columnNameList_return indexedCols =null;

        HiveParser.autoRebuild_return autoRebuild135 =null;

        HiveParser.indexPropertiesPrefixed_return indexPropertiesPrefixed136 =null;

        HiveParser.indexTblName_return indexTblName137 =null;

        HiveParser.tableRowFormat_return tableRowFormat138 =null;

        HiveParser.tableFileFormat_return tableFileFormat139 =null;

        HiveParser.tableLocation_return tableLocation140 =null;

        HiveParser.tablePropertiesPrefixed_return tablePropertiesPrefixed141 =null;

        HiveParser.indexComment_return indexComment142 =null;


        CommonTree typeName_tree=null;
        CommonTree KW_CREATE128_tree=null;
        CommonTree KW_INDEX129_tree=null;
        CommonTree KW_ON130_tree=null;
        CommonTree KW_TABLE131_tree=null;
        CommonTree LPAREN132_tree=null;
        CommonTree RPAREN133_tree=null;
        CommonTree KW_AS134_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
        RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_KW_INDEX=new RewriteRuleTokenStream(adaptor,"token KW_INDEX");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleTokenStream stream_KW_ON=new RewriteRuleTokenStream(adaptor,"token KW_ON");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleSubtreeStream stream_columnNameList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameList");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
        RewriteRuleSubtreeStream stream_indexPropertiesPrefixed=new RewriteRuleSubtreeStream(adaptor,"rule indexPropertiesPrefixed");
        RewriteRuleSubtreeStream stream_tableRowFormat=new RewriteRuleSubtreeStream(adaptor,"rule tableRowFormat");
        RewriteRuleSubtreeStream stream_tableFileFormat=new RewriteRuleSubtreeStream(adaptor,"rule tableFileFormat");
        RewriteRuleSubtreeStream stream_tablePropertiesPrefixed=new RewriteRuleSubtreeStream(adaptor,"rule tablePropertiesPrefixed");
        RewriteRuleSubtreeStream stream_autoRebuild=new RewriteRuleSubtreeStream(adaptor,"rule autoRebuild");
        RewriteRuleSubtreeStream stream_tableLocation=new RewriteRuleSubtreeStream(adaptor,"rule tableLocation");
        RewriteRuleSubtreeStream stream_indexTblName=new RewriteRuleSubtreeStream(adaptor,"rule indexTblName");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
        RewriteRuleSubtreeStream stream_indexComment=new RewriteRuleSubtreeStream(adaptor,"rule indexComment");
         pushMsg("create index statement", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:859:5: ( KW_CREATE KW_INDEX indexName= identifier KW_ON KW_TABLE tab= tableName LPAREN indexedCols= columnNameList RPAREN KW_AS typeName= StringLiteral ( autoRebuild )? ( indexPropertiesPrefixed )? ( indexTblName )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( indexComment )? -> ^( TOK_CREATEINDEX $indexName $typeName $tab $indexedCols ( autoRebuild )? ( indexPropertiesPrefixed )? ( indexTblName )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( indexComment )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:859:7: KW_CREATE KW_INDEX indexName= identifier KW_ON KW_TABLE tab= tableName LPAREN indexedCols= columnNameList RPAREN KW_AS typeName= StringLiteral ( autoRebuild )? ( indexPropertiesPrefixed )? ( indexTblName )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( indexComment )?
            {
            KW_CREATE128=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_createIndexStatement2883);  
            stream_KW_CREATE.add(KW_CREATE128);


            KW_INDEX129=(Token)match(input,KW_INDEX,FOLLOW_KW_INDEX_in_createIndexStatement2885);  
            stream_KW_INDEX.add(KW_INDEX129);


            pushFollow(FOLLOW_identifier_in_createIndexStatement2889);
            indexName=identifier();

            state._fsp--;

            stream_identifier.add(indexName.getTree());

            KW_ON130=(Token)match(input,KW_ON,FOLLOW_KW_ON_in_createIndexStatement2897);  
            stream_KW_ON.add(KW_ON130);


            KW_TABLE131=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_createIndexStatement2899);  
            stream_KW_TABLE.add(KW_TABLE131);


            pushFollow(FOLLOW_tableName_in_createIndexStatement2903);
            tab=tableName();

            state._fsp--;

            stream_tableName.add(tab.getTree());

            LPAREN132=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_createIndexStatement2905);  
            stream_LPAREN.add(LPAREN132);


            pushFollow(FOLLOW_columnNameList_in_createIndexStatement2909);
            indexedCols=columnNameList();

            state._fsp--;

            stream_columnNameList.add(indexedCols.getTree());

            RPAREN133=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_createIndexStatement2911);  
            stream_RPAREN.add(RPAREN133);


            KW_AS134=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_createIndexStatement2919);  
            stream_KW_AS.add(KW_AS134);


            typeName=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_createIndexStatement2923);  
            stream_StringLiteral.add(typeName);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:862:7: ( autoRebuild )?
            int alt38=2;
            switch ( input.LA(1) ) {
                case KW_WITH:
                    {
                    alt38=1;
                    }
                    break;
            }

            switch (alt38) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:862:7: autoRebuild
                    {
                    pushFollow(FOLLOW_autoRebuild_in_createIndexStatement2931);
                    autoRebuild135=autoRebuild();

                    state._fsp--;

                    stream_autoRebuild.add(autoRebuild135.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:863:7: ( indexPropertiesPrefixed )?
            int alt39=2;
            switch ( input.LA(1) ) {
                case KW_IDXPROPERTIES:
                    {
                    alt39=1;
                    }
                    break;
            }

            switch (alt39) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:863:7: indexPropertiesPrefixed
                    {
                    pushFollow(FOLLOW_indexPropertiesPrefixed_in_createIndexStatement2940);
                    indexPropertiesPrefixed136=indexPropertiesPrefixed();

                    state._fsp--;

                    stream_indexPropertiesPrefixed.add(indexPropertiesPrefixed136.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:864:7: ( indexTblName )?
            int alt40=2;
            switch ( input.LA(1) ) {
                case KW_IN:
                    {
                    alt40=1;
                    }
                    break;
            }

            switch (alt40) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:864:7: indexTblName
                    {
                    pushFollow(FOLLOW_indexTblName_in_createIndexStatement2949);
                    indexTblName137=indexTblName();

                    state._fsp--;

                    stream_indexTblName.add(indexTblName137.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:865:7: ( tableRowFormat )?
            int alt41=2;
            switch ( input.LA(1) ) {
                case KW_ROW:
                    {
                    alt41=1;
                    }
                    break;
            }

            switch (alt41) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:865:7: tableRowFormat
                    {
                    pushFollow(FOLLOW_tableRowFormat_in_createIndexStatement2958);
                    tableRowFormat138=tableRowFormat();

                    state._fsp--;

                    stream_tableRowFormat.add(tableRowFormat138.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:866:7: ( tableFileFormat )?
            int alt42=2;
            switch ( input.LA(1) ) {
                case KW_STORED:
                    {
                    alt42=1;
                    }
                    break;
            }

            switch (alt42) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:866:7: tableFileFormat
                    {
                    pushFollow(FOLLOW_tableFileFormat_in_createIndexStatement2967);
                    tableFileFormat139=tableFileFormat();

                    state._fsp--;

                    stream_tableFileFormat.add(tableFileFormat139.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:867:7: ( tableLocation )?
            int alt43=2;
            switch ( input.LA(1) ) {
                case KW_LOCATION:
                    {
                    alt43=1;
                    }
                    break;
            }

            switch (alt43) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:867:7: tableLocation
                    {
                    pushFollow(FOLLOW_tableLocation_in_createIndexStatement2976);
                    tableLocation140=tableLocation();

                    state._fsp--;

                    stream_tableLocation.add(tableLocation140.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:868:7: ( tablePropertiesPrefixed )?
            int alt44=2;
            switch ( input.LA(1) ) {
                case KW_TBLPROPERTIES:
                    {
                    alt44=1;
                    }
                    break;
            }

            switch (alt44) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:868:7: tablePropertiesPrefixed
                    {
                    pushFollow(FOLLOW_tablePropertiesPrefixed_in_createIndexStatement2985);
                    tablePropertiesPrefixed141=tablePropertiesPrefixed();

                    state._fsp--;

                    stream_tablePropertiesPrefixed.add(tablePropertiesPrefixed141.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:869:7: ( indexComment )?
            int alt45=2;
            switch ( input.LA(1) ) {
                case KW_COMMENT:
                    {
                    alt45=1;
                    }
                    break;
            }

            switch (alt45) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:869:7: indexComment
                    {
                    pushFollow(FOLLOW_indexComment_in_createIndexStatement2994);
                    indexComment142=indexComment();

                    state._fsp--;

                    stream_indexComment.add(indexComment142.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: typeName, indexName, tableFileFormat, autoRebuild, indexTblName, tableLocation, indexedCols, indexPropertiesPrefixed, tablePropertiesPrefixed, indexComment, tableRowFormat, tab
            // token labels: typeName
            // rule labels: indexedCols, retval, indexName, tab
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_typeName=new RewriteRuleTokenStream(adaptor,"token typeName",typeName);
            RewriteRuleSubtreeStream stream_indexedCols=new RewriteRuleSubtreeStream(adaptor,"rule indexedCols",indexedCols!=null?indexedCols.tree:null);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_indexName=new RewriteRuleSubtreeStream(adaptor,"rule indexName",indexName!=null?indexName.tree:null);
            RewriteRuleSubtreeStream stream_tab=new RewriteRuleSubtreeStream(adaptor,"rule tab",tab!=null?tab.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 870:5: -> ^( TOK_CREATEINDEX $indexName $typeName $tab $indexedCols ( autoRebuild )? ( indexPropertiesPrefixed )? ( indexTblName )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( indexComment )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:870:7: ^( TOK_CREATEINDEX $indexName $typeName $tab $indexedCols ( autoRebuild )? ( indexPropertiesPrefixed )? ( indexTblName )? ( tableRowFormat )? ( tableFileFormat )? ( tableLocation )? ( tablePropertiesPrefixed )? ( indexComment )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_CREATEINDEX, "TOK_CREATEINDEX")
                , root_1);

                adaptor.addChild(root_1, stream_indexName.nextTree());

                adaptor.addChild(root_1, stream_typeName.nextNode());

                adaptor.addChild(root_1, stream_tab.nextTree());

                adaptor.addChild(root_1, stream_indexedCols.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:871:9: ( autoRebuild )?
                if ( stream_autoRebuild.hasNext() ) {
                    adaptor.addChild(root_1, stream_autoRebuild.nextTree());

                }
                stream_autoRebuild.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:872:9: ( indexPropertiesPrefixed )?
                if ( stream_indexPropertiesPrefixed.hasNext() ) {
                    adaptor.addChild(root_1, stream_indexPropertiesPrefixed.nextTree());

                }
                stream_indexPropertiesPrefixed.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:873:9: ( indexTblName )?
                if ( stream_indexTblName.hasNext() ) {
                    adaptor.addChild(root_1, stream_indexTblName.nextTree());

                }
                stream_indexTblName.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:874:9: ( tableRowFormat )?
                if ( stream_tableRowFormat.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableRowFormat.nextTree());

                }
                stream_tableRowFormat.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:875:9: ( tableFileFormat )?
                if ( stream_tableFileFormat.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableFileFormat.nextTree());

                }
                stream_tableFileFormat.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:876:9: ( tableLocation )?
                if ( stream_tableLocation.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableLocation.nextTree());

                }
                stream_tableLocation.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:877:9: ( tablePropertiesPrefixed )?
                if ( stream_tablePropertiesPrefixed.hasNext() ) {
                    adaptor.addChild(root_1, stream_tablePropertiesPrefixed.nextTree());

                }
                stream_tablePropertiesPrefixed.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:878:9: ( indexComment )?
                if ( stream_indexComment.hasNext() ) {
                    adaptor.addChild(root_1, stream_indexComment.nextTree());

                }
                stream_indexComment.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "createIndexStatement"


    public static class indexComment_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "indexComment"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:881:1: indexComment : KW_COMMENT comment= StringLiteral -> ^( TOK_INDEXCOMMENT $comment) ;
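    // Editorial note (illustrative, derived from the rule above): an index comment clause of
    // the form COMMENT '<string literal>' is rewritten to ^( TOK_INDEXCOMMENT $comment ).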
    public final HiveParser.indexComment_return indexComment() throws RecognitionException {
        HiveParser.indexComment_return retval = new HiveParser.indexComment_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token comment=null;
        Token KW_COMMENT143=null;

        CommonTree comment_tree=null;
        CommonTree KW_COMMENT143_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");

         pushMsg("comment on an index", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:884:9: ( KW_COMMENT comment= StringLiteral -> ^( TOK_INDEXCOMMENT $comment) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:885:17: KW_COMMENT comment= StringLiteral
            {
            KW_COMMENT143=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_indexComment3151);  
            stream_KW_COMMENT.add(KW_COMMENT143);


            comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_indexComment3155);  
            stream_StringLiteral.add(comment);


            // AST REWRITE
            // elements: comment
            // token labels: comment
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 885:51: -> ^( TOK_INDEXCOMMENT $comment)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:885:54: ^( TOK_INDEXCOMMENT $comment)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_INDEXCOMMENT, "TOK_INDEXCOMMENT")
                , root_1);

                adaptor.addChild(root_1, stream_comment.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "indexComment"


    public static class autoRebuild_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "autoRebuild"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:888:1: autoRebuild : KW_WITH KW_DEFERRED KW_REBUILD -> ^( TOK_DEFERRED_REBUILDINDEX ) ;
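    // Editorial note (illustrative, derived from the rule above): the fixed phrase
    // WITH DEFERRED REBUILD is rewritten to an empty TOK_DEFERRED_REBUILDINDEX node.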
    public final HiveParser.autoRebuild_return autoRebuild() throws RecognitionException {
        HiveParser.autoRebuild_return retval = new HiveParser.autoRebuild_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_WITH144=null;
        Token KW_DEFERRED145=null;
        Token KW_REBUILD146=null;

        CommonTree KW_WITH144_tree=null;
        CommonTree KW_DEFERRED145_tree=null;
        CommonTree KW_REBUILD146_tree=null;
        RewriteRuleTokenStream stream_KW_REBUILD=new RewriteRuleTokenStream(adaptor,"token KW_REBUILD");
        RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
        RewriteRuleTokenStream stream_KW_DEFERRED=new RewriteRuleTokenStream(adaptor,"token KW_DEFERRED");

         pushMsg("auto rebuild index", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:891:5: ( KW_WITH KW_DEFERRED KW_REBUILD -> ^( TOK_DEFERRED_REBUILDINDEX ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:891:7: KW_WITH KW_DEFERRED KW_REBUILD
            {
            KW_WITH144=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_autoRebuild3196);  
            stream_KW_WITH.add(KW_WITH144);


            KW_DEFERRED145=(Token)match(input,KW_DEFERRED,FOLLOW_KW_DEFERRED_in_autoRebuild3198);  
            stream_KW_DEFERRED.add(KW_DEFERRED145);


            KW_REBUILD146=(Token)match(input,KW_REBUILD,FOLLOW_KW_REBUILD_in_autoRebuild3200);  
            stream_KW_REBUILD.add(KW_REBUILD146);


            // AST REWRITE
            // elements: 
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 892:5: -> ^( TOK_DEFERRED_REBUILDINDEX )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:892:7: ^( TOK_DEFERRED_REBUILDINDEX )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DEFERRED_REBUILDINDEX, "TOK_DEFERRED_REBUILDINDEX")
                , root_1);

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "autoRebuild"


    public static class indexTblName_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "indexTblName"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:895:1: indexTblName : KW_IN KW_TABLE indexTbl= tableName -> ^( TOK_CREATEINDEX_INDEXTBLNAME $indexTbl) ;
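    // Editorial note (illustrative, derived from the rule above): a clause of the form
    // IN TABLE <tableName> is rewritten to ^( TOK_CREATEINDEX_INDEXTBLNAME $indexTbl ).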
    public final HiveParser.indexTblName_return indexTblName() throws RecognitionException {
        HiveParser.indexTblName_return retval = new HiveParser.indexTblName_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_IN147=null;
        Token KW_TABLE148=null;
        HiveParser_FromClauseParser.tableName_return indexTbl =null;


        CommonTree KW_IN147_tree=null;
        CommonTree KW_TABLE148_tree=null;
        RewriteRuleTokenStream stream_KW_IN=new RewriteRuleTokenStream(adaptor,"token KW_IN");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
         pushMsg("index table name", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:898:5: ( KW_IN KW_TABLE indexTbl= tableName -> ^( TOK_CREATEINDEX_INDEXTBLNAME $indexTbl) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:898:7: KW_IN KW_TABLE indexTbl= tableName
            {
            KW_IN147=(Token)match(input,KW_IN,FOLLOW_KW_IN_in_indexTblName3236);  
            stream_KW_IN.add(KW_IN147);


            KW_TABLE148=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_indexTblName3238);  
            stream_KW_TABLE.add(KW_TABLE148);


            pushFollow(FOLLOW_tableName_in_indexTblName3242);
            indexTbl=tableName();

            state._fsp--;

            stream_tableName.add(indexTbl.getTree());

            // AST REWRITE
            // elements: indexTbl
            // token labels: 
            // rule labels: retval, indexTbl
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_indexTbl=new RewriteRuleSubtreeStream(adaptor,"rule indexTbl",indexTbl!=null?indexTbl.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 899:5: -> ^( TOK_CREATEINDEX_INDEXTBLNAME $indexTbl)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:899:7: ^( TOK_CREATEINDEX_INDEXTBLNAME $indexTbl)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_CREATEINDEX_INDEXTBLNAME, "TOK_CREATEINDEX_INDEXTBLNAME")
                , root_1);

                adaptor.addChild(root_1, stream_indexTbl.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "indexTblName"


    public static class indexPropertiesPrefixed_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "indexPropertiesPrefixed"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:902:1: indexPropertiesPrefixed : KW_IDXPROPERTIES ! indexProperties ;
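    // Editorial note (illustrative, derived from the rule above): the KW_IDXPROPERTIES token
    // is suppressed (the '!' operator), so IDXPROPERTIES ( ... ) yields only the
    // indexProperties subtree.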
    public final HiveParser.indexPropertiesPrefixed_return indexPropertiesPrefixed() throws RecognitionException {
        HiveParser.indexPropertiesPrefixed_return retval = new HiveParser.indexPropertiesPrefixed_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_IDXPROPERTIES149=null;
        HiveParser.indexProperties_return indexProperties150 =null;


        CommonTree KW_IDXPROPERTIES149_tree=null;

         pushMsg("table properties with prefix", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:905:5: ( KW_IDXPROPERTIES ! indexProperties )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:906:9: KW_IDXPROPERTIES ! indexProperties
            {
            root_0 = (CommonTree)adaptor.nil();


            KW_IDXPROPERTIES149=(Token)match(input,KW_IDXPROPERTIES,FOLLOW_KW_IDXPROPERTIES_in_indexPropertiesPrefixed3289); 

            pushFollow(FOLLOW_indexProperties_in_indexPropertiesPrefixed3292);
            indexProperties150=indexProperties();

            state._fsp--;

            adaptor.addChild(root_0, indexProperties150.getTree());

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "indexPropertiesPrefixed"


    public static class indexProperties_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "indexProperties"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:909:1: indexProperties : LPAREN indexPropertiesList RPAREN -> ^( TOK_INDEXPROPERTIES indexPropertiesList ) ;
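    // Editorial note (illustrative, derived from the rule above): the surrounding LPAREN/RPAREN
    // are dropped and the property list is wrapped as ^( TOK_INDEXPROPERTIES indexPropertiesList ).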
    public final HiveParser.indexProperties_return indexProperties() throws RecognitionException {
        HiveParser.indexProperties_return retval = new HiveParser.indexProperties_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token LPAREN151=null;
        Token RPAREN153=null;
        HiveParser.indexPropertiesList_return indexPropertiesList152 =null;


        CommonTree LPAREN151_tree=null;
        CommonTree RPAREN153_tree=null;
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleSubtreeStream stream_indexPropertiesList=new RewriteRuleSubtreeStream(adaptor,"rule indexPropertiesList");
         pushMsg("index properties", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:912:5: ( LPAREN indexPropertiesList RPAREN -> ^( TOK_INDEXPROPERTIES indexPropertiesList ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:913:7: LPAREN indexPropertiesList RPAREN
            {
            LPAREN151=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_indexProperties3325);  
            stream_LPAREN.add(LPAREN151);


            pushFollow(FOLLOW_indexPropertiesList_in_indexProperties3327);
            indexPropertiesList152=indexPropertiesList();

            state._fsp--;

            stream_indexPropertiesList.add(indexPropertiesList152.getTree());

            RPAREN153=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_indexProperties3329);  
            stream_RPAREN.add(RPAREN153);


            // AST REWRITE
            // elements: indexPropertiesList
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 913:41: -> ^( TOK_INDEXPROPERTIES indexPropertiesList )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:913:44: ^( TOK_INDEXPROPERTIES indexPropertiesList )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_INDEXPROPERTIES, "TOK_INDEXPROPERTIES")
                , root_1);

                adaptor.addChild(root_1, stream_indexPropertiesList.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "indexProperties"


    public static class indexPropertiesList_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "indexPropertiesList"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:916:1: indexPropertiesList : keyValueProperty ( COMMA keyValueProperty )* -> ^( TOK_INDEXPROPLIST ( keyValueProperty )+ ) ;
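    // Editorial note (illustrative, derived from the rule above): one or more comma-separated
    // keyValueProperty entries (defined elsewhere in this grammar, typically quoted
    // '<key>'='<value>' pairs) are collected under a single TOK_INDEXPROPLIST node.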
    public final HiveParser.indexPropertiesList_return indexPropertiesList() throws RecognitionException {
        HiveParser.indexPropertiesList_return retval = new HiveParser.indexPropertiesList_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token COMMA155=null;
        HiveParser.keyValueProperty_return keyValueProperty154 =null;

        HiveParser.keyValueProperty_return keyValueProperty156 =null;


        CommonTree COMMA155_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleSubtreeStream stream_keyValueProperty=new RewriteRuleSubtreeStream(adaptor,"rule keyValueProperty");
         pushMsg("index properties list", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:919:5: ( keyValueProperty ( COMMA keyValueProperty )* -> ^( TOK_INDEXPROPLIST ( keyValueProperty )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:920:7: keyValueProperty ( COMMA keyValueProperty )*
            {
            pushFollow(FOLLOW_keyValueProperty_in_indexPropertiesList3370);
            keyValueProperty154=keyValueProperty();

            state._fsp--;

            stream_keyValueProperty.add(keyValueProperty154.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:920:24: ( COMMA keyValueProperty )*
            loop46:
            do {
                int alt46=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt46=1;
                    }
                    break;

                }

                switch (alt46) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:920:25: COMMA keyValueProperty
            	    {
            	    COMMA155=(Token)match(input,COMMA,FOLLOW_COMMA_in_indexPropertiesList3373);  
            	    stream_COMMA.add(COMMA155);


            	    pushFollow(FOLLOW_keyValueProperty_in_indexPropertiesList3375);
            	    keyValueProperty156=keyValueProperty();

            	    state._fsp--;

            	    stream_keyValueProperty.add(keyValueProperty156.getTree());

            	    }
            	    break;

            	default :
            	    break loop46;
                }
            } while (true);


            // AST REWRITE
            // elements: keyValueProperty
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 920:50: -> ^( TOK_INDEXPROPLIST ( keyValueProperty )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:920:53: ^( TOK_INDEXPROPLIST ( keyValueProperty )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_INDEXPROPLIST, "TOK_INDEXPROPLIST")
                , root_1);

                if ( !(stream_keyValueProperty.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_keyValueProperty.hasNext() ) {
                    adaptor.addChild(root_1, stream_keyValueProperty.nextTree());

                }
                stream_keyValueProperty.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "indexPropertiesList"


    public static class dropIndexStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "dropIndexStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:923:1: dropIndexStatement : KW_DROP KW_INDEX ( ifExists )? indexName= identifier KW_ON tab= tableName -> ^( TOK_DROPINDEX $indexName $tab ( ifExists )? ) ;
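    // Editorial note (illustrative, derived from the rule above): a statement such as
    // DROP INDEX [IF EXISTS] <indexName> ON <tableName> is rewritten to
    // ^( TOK_DROPINDEX $indexName $tab (ifExists)? ).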
    public final HiveParser.dropIndexStatement_return dropIndexStatement() throws RecognitionException {
        HiveParser.dropIndexStatement_return retval = new HiveParser.dropIndexStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_DROP157=null;
        Token KW_INDEX158=null;
        Token KW_ON160=null;
        HiveParser_IdentifiersParser.identifier_return indexName =null;

        HiveParser_FromClauseParser.tableName_return tab =null;

        HiveParser.ifExists_return ifExists159 =null;


        CommonTree KW_DROP157_tree=null;
        CommonTree KW_INDEX158_tree=null;
        CommonTree KW_ON160_tree=null;
        RewriteRuleTokenStream stream_KW_INDEX=new RewriteRuleTokenStream(adaptor,"token KW_INDEX");
        RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
        RewriteRuleTokenStream stream_KW_ON=new RewriteRuleTokenStream(adaptor,"token KW_ON");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
        RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("drop index statement", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:926:5: ( KW_DROP KW_INDEX ( ifExists )? indexName= identifier KW_ON tab= tableName -> ^( TOK_DROPINDEX $indexName $tab ( ifExists )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:926:7: KW_DROP KW_INDEX ( ifExists )? indexName= identifier KW_ON tab= tableName
            {
            KW_DROP157=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_dropIndexStatement3413);  
            stream_KW_DROP.add(KW_DROP157);


            KW_INDEX158=(Token)match(input,KW_INDEX,FOLLOW_KW_INDEX_in_dropIndexStatement3415);  
            stream_KW_INDEX.add(KW_INDEX158);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:926:24: ( ifExists )?
            int alt47=2;
            switch ( input.LA(1) ) {
                case KW_IF:
                    {
                    alt47=1;
                    }
                    break;
            }

            switch (alt47) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:926:24: ifExists
                    {
                    pushFollow(FOLLOW_ifExists_in_dropIndexStatement3417);
                    ifExists159=ifExists();

                    state._fsp--;

                    stream_ifExists.add(ifExists159.getTree());

                    }
                    break;

            }


            pushFollow(FOLLOW_identifier_in_dropIndexStatement3422);
            indexName=identifier();

            state._fsp--;

            stream_identifier.add(indexName.getTree());

            KW_ON160=(Token)match(input,KW_ON,FOLLOW_KW_ON_in_dropIndexStatement3424);  
            stream_KW_ON.add(KW_ON160);


            pushFollow(FOLLOW_tableName_in_dropIndexStatement3428);
            tab=tableName();

            state._fsp--;

            stream_tableName.add(tab.getTree());

            // AST REWRITE
            // elements: indexName, ifExists, tab
            // token labels: 
            // rule labels: retval, indexName, tab
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_indexName=new RewriteRuleSubtreeStream(adaptor,"rule indexName",indexName!=null?indexName.tree:null);
            RewriteRuleSubtreeStream stream_tab=new RewriteRuleSubtreeStream(adaptor,"rule tab",tab!=null?tab.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 927:5: -> ^( TOK_DROPINDEX $indexName $tab ( ifExists )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:927:7: ^( TOK_DROPINDEX $indexName $tab ( ifExists )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DROPINDEX, "TOK_DROPINDEX")
                , root_1);

                adaptor.addChild(root_1, stream_indexName.nextTree());

                adaptor.addChild(root_1, stream_tab.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:927:39: ( ifExists )?
                if ( stream_ifExists.hasNext() ) {
                    adaptor.addChild(root_1, stream_ifExists.nextTree());

                }
                stream_ifExists.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "dropIndexStatement"


    public static class dropTableStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "dropTableStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:930:1: dropTableStatement : KW_DROP KW_TABLE ( ifExists )? tableName ( KW_PURGE )? -> ^( TOK_DROPTABLE tableName ( ifExists )? ( KW_PURGE )? ) ;
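    // Editorial note (illustrative, derived from the rule above): a statement such as
    // DROP TABLE [IF EXISTS] <tableName> [PURGE] is rewritten to
    // ^( TOK_DROPTABLE tableName (ifExists)? (KW_PURGE)? ).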
    public final HiveParser.dropTableStatement_return dropTableStatement() throws RecognitionException {
        HiveParser.dropTableStatement_return retval = new HiveParser.dropTableStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_DROP161=null;
        Token KW_TABLE162=null;
        Token KW_PURGE165=null;
        HiveParser.ifExists_return ifExists163 =null;

        HiveParser_FromClauseParser.tableName_return tableName164 =null;


        CommonTree KW_DROP161_tree=null;
        CommonTree KW_TABLE162_tree=null;
        CommonTree KW_PURGE165_tree=null;
        RewriteRuleTokenStream stream_KW_PURGE=new RewriteRuleTokenStream(adaptor,"token KW_PURGE");
        RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
        RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");
         pushMsg("drop statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:933:5: ( KW_DROP KW_TABLE ( ifExists )? tableName ( KW_PURGE )? -> ^( TOK_DROPTABLE tableName ( ifExists )? ( KW_PURGE )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:933:7: KW_DROP KW_TABLE ( ifExists )? tableName ( KW_PURGE )?
            {
            KW_DROP161=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_dropTableStatement3473);  
            stream_KW_DROP.add(KW_DROP161);


            KW_TABLE162=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_dropTableStatement3475);  
            stream_KW_TABLE.add(KW_TABLE162);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:933:24: ( ifExists )?
            int alt48=2;
            switch ( input.LA(1) ) {
                case KW_IF:
                    {
                    alt48=1;
                    }
                    break;
            }

            switch (alt48) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:933:24: ifExists
                    {
                    pushFollow(FOLLOW_ifExists_in_dropTableStatement3477);
                    ifExists163=ifExists();

                    state._fsp--;

                    stream_ifExists.add(ifExists163.getTree());

                    }
                    break;

            }


            pushFollow(FOLLOW_tableName_in_dropTableStatement3480);
            tableName164=tableName();

            state._fsp--;

            stream_tableName.add(tableName164.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:933:44: ( KW_PURGE )?
            int alt49=2;
            switch ( input.LA(1) ) {
                case KW_PURGE:
                    {
                    alt49=1;
                    }
                    break;
            }

            switch (alt49) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:933:44: KW_PURGE
                    {
                    KW_PURGE165=(Token)match(input,KW_PURGE,FOLLOW_KW_PURGE_in_dropTableStatement3482);  
                    stream_KW_PURGE.add(KW_PURGE165);


                    }
                    break;

            }


            // AST REWRITE
            // elements: ifExists, tableName, KW_PURGE
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 933:54: -> ^( TOK_DROPTABLE tableName ( ifExists )? ( KW_PURGE )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:933:57: ^( TOK_DROPTABLE tableName ( ifExists )? ( KW_PURGE )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DROPTABLE, "TOK_DROPTABLE")
                , root_1);

                adaptor.addChild(root_1, stream_tableName.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:933:83: ( ifExists )?
                if ( stream_ifExists.hasNext() ) {
                    adaptor.addChild(root_1, stream_ifExists.nextTree());

                }
                stream_ifExists.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:933:93: ( KW_PURGE )?
                if ( stream_KW_PURGE.hasNext() ) {
                    adaptor.addChild(root_1, 
                    stream_KW_PURGE.nextNode()
                    );

                }
                stream_KW_PURGE.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "dropTableStatement"


    public static class alterStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:936:1: alterStatement : ( KW_ALTER KW_TABLE tableName alterTableStatementSuffix -> ^( TOK_ALTERTABLE tableName alterTableStatementSuffix ) | KW_ALTER KW_VIEW tableName ( KW_AS )? alterViewStatementSuffix -> ^( TOK_ALTERVIEW tableName alterViewStatementSuffix ) | KW_ALTER KW_INDEX alterIndexStatementSuffix -> alterIndexStatementSuffix | KW_ALTER ( KW_DATABASE | KW_SCHEMA ) alterDatabaseStatementSuffix -> alterDatabaseStatementSuffix );
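    // Editorial summary (illustrative, derived from the rule above): the four alternatives are
    //     ALTER TABLE <tableName> <alterTableStatementSuffix>    -> TOK_ALTERTABLE
    //     ALTER VIEW <tableName> [AS] <alterViewStatementSuffix> -> TOK_ALTERVIEW
    //     ALTER INDEX <alterIndexStatementSuffix>                -> suffix passed through
    //     ALTER DATABASE|SCHEMA <alterDatabaseStatementSuffix>   -> suffix passed through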
    public final HiveParser.alterStatement_return alterStatement() throws RecognitionException {
        HiveParser.alterStatement_return retval = new HiveParser.alterStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_ALTER166=null;
        Token KW_TABLE167=null;
        Token KW_ALTER170=null;
        Token KW_VIEW171=null;
        Token KW_AS173=null;
        Token KW_ALTER175=null;
        Token KW_INDEX176=null;
        Token KW_ALTER178=null;
        Token KW_DATABASE179=null;
        Token KW_SCHEMA180=null;
        HiveParser_FromClauseParser.tableName_return tableName168 =null;

        HiveParser.alterTableStatementSuffix_return alterTableStatementSuffix169 =null;

        HiveParser_FromClauseParser.tableName_return tableName172 =null;

        HiveParser.alterViewStatementSuffix_return alterViewStatementSuffix174 =null;

        HiveParser.alterIndexStatementSuffix_return alterIndexStatementSuffix177 =null;

        HiveParser.alterDatabaseStatementSuffix_return alterDatabaseStatementSuffix181 =null;


        CommonTree KW_ALTER166_tree=null;
        CommonTree KW_TABLE167_tree=null;
        CommonTree KW_ALTER170_tree=null;
        CommonTree KW_VIEW171_tree=null;
        CommonTree KW_AS173_tree=null;
        CommonTree KW_ALTER175_tree=null;
        CommonTree KW_INDEX176_tree=null;
        CommonTree KW_ALTER178_tree=null;
        CommonTree KW_DATABASE179_tree=null;
        CommonTree KW_SCHEMA180_tree=null;
        RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
        RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
        RewriteRuleTokenStream stream_KW_ALTER=new RewriteRuleTokenStream(adaptor,"token KW_ALTER");
        RewriteRuleTokenStream stream_KW_INDEX=new RewriteRuleTokenStream(adaptor,"token KW_INDEX");
        RewriteRuleTokenStream stream_KW_VIEW=new RewriteRuleTokenStream(adaptor,"token KW_VIEW");
        RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleSubtreeStream stream_alterTableStatementSuffix=new RewriteRuleSubtreeStream(adaptor,"rule alterTableStatementSuffix");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
        RewriteRuleSubtreeStream stream_alterDatabaseStatementSuffix=new RewriteRuleSubtreeStream(adaptor,"rule alterDatabaseStatementSuffix");
        RewriteRuleSubtreeStream stream_alterViewStatementSuffix=new RewriteRuleSubtreeStream(adaptor,"rule alterViewStatementSuffix");
        RewriteRuleSubtreeStream stream_alterIndexStatementSuffix=new RewriteRuleSubtreeStream(adaptor,"rule alterIndexStatementSuffix");
         pushMsg("alter statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:939:5: ( KW_ALTER KW_TABLE tableName alterTableStatementSuffix -> ^( TOK_ALTERTABLE tableName alterTableStatementSuffix ) | KW_ALTER KW_VIEW tableName ( KW_AS )? alterViewStatementSuffix -> ^( TOK_ALTERVIEW tableName alterViewStatementSuffix ) | KW_ALTER KW_INDEX alterIndexStatementSuffix -> alterIndexStatementSuffix | KW_ALTER ( KW_DATABASE | KW_SCHEMA ) alterDatabaseStatementSuffix -> alterDatabaseStatementSuffix )
            int alt52=4;
            switch ( input.LA(1) ) {
            case KW_ALTER:
                {
                switch ( input.LA(2) ) {
                case KW_TABLE:
                    {
                    alt52=1;
                    }
                    break;
                case KW_VIEW:
                    {
                    alt52=2;
                    }
                    break;
                case KW_INDEX:
                    {
                    alt52=3;
                    }
                    break;
                case KW_DATABASE:
                case KW_SCHEMA:
                    {
                    alt52=4;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 52, 1, input);

                    throw nvae;

                }

                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 52, 0, input);

                throw nvae;

            }

            switch (alt52) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:939:7: KW_ALTER KW_TABLE tableName alterTableStatementSuffix
                    {
                    KW_ALTER166=(Token)match(input,KW_ALTER,FOLLOW_KW_ALTER_in_alterStatement3524);  
                    stream_KW_ALTER.add(KW_ALTER166);


                    KW_TABLE167=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_alterStatement3526);  
                    stream_KW_TABLE.add(KW_TABLE167);


                    pushFollow(FOLLOW_tableName_in_alterStatement3528);
                    tableName168=tableName();

                    state._fsp--;

                    stream_tableName.add(tableName168.getTree());

                    pushFollow(FOLLOW_alterTableStatementSuffix_in_alterStatement3530);
                    alterTableStatementSuffix169=alterTableStatementSuffix();

                    state._fsp--;

                    stream_alterTableStatementSuffix.add(alterTableStatementSuffix169.getTree());

                    // AST REWRITE
                    // elements: tableName, alterTableStatementSuffix
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 939:61: -> ^( TOK_ALTERTABLE tableName alterTableStatementSuffix )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:939:64: ^( TOK_ALTERTABLE tableName alterTableStatementSuffix )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERTABLE, "TOK_ALTERTABLE")
                        , root_1);

                        adaptor.addChild(root_1, stream_tableName.nextTree());

                        adaptor.addChild(root_1, stream_alterTableStatementSuffix.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:940:7: KW_ALTER KW_VIEW tableName ( KW_AS )? alterViewStatementSuffix
                    {
                    KW_ALTER170=(Token)match(input,KW_ALTER,FOLLOW_KW_ALTER_in_alterStatement3548);  
                    stream_KW_ALTER.add(KW_ALTER170);


                    KW_VIEW171=(Token)match(input,KW_VIEW,FOLLOW_KW_VIEW_in_alterStatement3550);  
                    stream_KW_VIEW.add(KW_VIEW171);


                    pushFollow(FOLLOW_tableName_in_alterStatement3552);
                    tableName172=tableName();

                    state._fsp--;

                    stream_tableName.add(tableName172.getTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:940:34: ( KW_AS )?
                    int alt50=2;
                    switch ( input.LA(1) ) {
                        case KW_AS:
                            {
                            alt50=1;
                            }
                            break;
                    }

                    switch (alt50) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:940:34: KW_AS
                            {
                            KW_AS173=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_alterStatement3554);  
                            stream_KW_AS.add(KW_AS173);


                            }
                            break;

                    }


                    pushFollow(FOLLOW_alterViewStatementSuffix_in_alterStatement3557);
                    alterViewStatementSuffix174=alterViewStatementSuffix();

                    state._fsp--;

                    stream_alterViewStatementSuffix.add(alterViewStatementSuffix174.getTree());

                    // AST REWRITE
                    // elements: tableName, alterViewStatementSuffix
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 940:66: -> ^( TOK_ALTERVIEW tableName alterViewStatementSuffix )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:940:69: ^( TOK_ALTERVIEW tableName alterViewStatementSuffix )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERVIEW, "TOK_ALTERVIEW")
                        , root_1);

                        adaptor.addChild(root_1, stream_tableName.nextTree());

                        adaptor.addChild(root_1, stream_alterViewStatementSuffix.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:941:7: KW_ALTER KW_INDEX alterIndexStatementSuffix
                    {
                    KW_ALTER175=(Token)match(input,KW_ALTER,FOLLOW_KW_ALTER_in_alterStatement3575);  
                    stream_KW_ALTER.add(KW_ALTER175);


                    KW_INDEX176=(Token)match(input,KW_INDEX,FOLLOW_KW_INDEX_in_alterStatement3577);  
                    stream_KW_INDEX.add(KW_INDEX176);


                    pushFollow(FOLLOW_alterIndexStatementSuffix_in_alterStatement3579);
                    alterIndexStatementSuffix177=alterIndexStatementSuffix();

                    state._fsp--;

                    stream_alterIndexStatementSuffix.add(alterIndexStatementSuffix177.getTree());

                    // AST REWRITE
                    // elements: alterIndexStatementSuffix
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 941:51: -> alterIndexStatementSuffix
                    {
                        adaptor.addChild(root_0, stream_alterIndexStatementSuffix.nextTree());

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 4 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:942:7: KW_ALTER ( KW_DATABASE | KW_SCHEMA ) alterDatabaseStatementSuffix
                    {
                    KW_ALTER178=(Token)match(input,KW_ALTER,FOLLOW_KW_ALTER_in_alterStatement3591);  
                    stream_KW_ALTER.add(KW_ALTER178);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:942:16: ( KW_DATABASE | KW_SCHEMA )
                    int alt51=2;
                    switch ( input.LA(1) ) {
                    case KW_DATABASE:
                        {
                        alt51=1;
                        }
                        break;
                    case KW_SCHEMA:
                        {
                        alt51=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 51, 0, input);

                        throw nvae;

                    }

                    switch (alt51) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:942:17: KW_DATABASE
                            {
                            KW_DATABASE179=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_alterStatement3594);  
                            stream_KW_DATABASE.add(KW_DATABASE179);


                            }
                            break;
                        case 2 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:942:29: KW_SCHEMA
                            {
                            KW_SCHEMA180=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_alterStatement3596);  
                            stream_KW_SCHEMA.add(KW_SCHEMA180);


                            }
                            break;

                    }


                    pushFollow(FOLLOW_alterDatabaseStatementSuffix_in_alterStatement3599);
                    alterDatabaseStatementSuffix181=alterDatabaseStatementSuffix();

                    state._fsp--;

                    stream_alterDatabaseStatementSuffix.add(alterDatabaseStatementSuffix181.getTree());

                    // AST REWRITE
                    // elements: alterDatabaseStatementSuffix
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 942:69: -> alterDatabaseStatementSuffix
                    {
                        adaptor.addChild(root_0, stream_alterDatabaseStatementSuffix.nextTree());

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatement"


    public static class alterTableStatementSuffix_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterTableStatementSuffix"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:945:1: alterTableStatementSuffix : ( alterStatementSuffixRename[true] | alterStatementSuffixUpdateStatsCol | alterStatementSuffixDropPartitions[true] | alterStatementSuffixAddPartitions[true] | alterStatementSuffixTouch | alterStatementSuffixArchive | alterStatementSuffixUnArchive | alterStatementSuffixProperties | alterStatementSuffixSkewedby | alterStatementSuffixExchangePartition | alterStatementPartitionKeyType | ( partitionSpec )? alterTblPartitionStatementSuffix -> alterTblPartitionStatementSuffix ( partitionSpec )? );
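    // Editorial summary (illustrative, derived from the rule above): this rule matches the
    // clause following "ALTER TABLE <tableName>" (rename, update-stats, add/drop partitions,
    // touch, archive/unarchive, properties, skewed-by, exchange partition, partition key type,
    // or an optional partitionSpec plus a per-partition suffix); in that last alternative the
    // rewrite places the optional partitionSpec after the suffix subtree.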
    public final HiveParser.alterTableStatementSuffix_return alterTableStatementSuffix() throws RecognitionException {
        HiveParser.alterTableStatementSuffix_return retval = new HiveParser.alterTableStatementSuffix_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.alterStatementSuffixRename_return alterStatementSuffixRename182 =null;

        HiveParser.alterStatementSuffixUpdateStatsCol_return alterStatementSuffixUpdateStatsCol183 =null;

        HiveParser.alterStatementSuffixDropPartitions_return alterStatementSuffixDropPartitions184 =null;

        HiveParser.alterStatementSuffixAddPartitions_return alterStatementSuffixAddPartitions185 =null;

        HiveParser.alterStatementSuffixTouch_return alterStatementSuffixTouch186 =null;

        HiveParser.alterStatementSuffixArchive_return alterStatementSuffixArchive187 =null;

        HiveParser.alterStatementSuffixUnArchive_return alterStatementSuffixUnArchive188 =null;

        HiveParser.alterStatementSuffixProperties_return alterStatementSuffixProperties189 =null;

        HiveParser.alterStatementSuffixSkewedby_return alterStatementSuffixSkewedby190 =null;

        HiveParser.alterStatementSuffixExchangePartition_return alterStatementSuffixExchangePartition191 =null;

        HiveParser.alterStatementPartitionKeyType_return alterStatementPartitionKeyType192 =null;

        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec193 =null;

        HiveParser.alterTblPartitionStatementSuffix_return alterTblPartitionStatementSuffix194 =null;


        RewriteRuleSubtreeStream stream_alterTblPartitionStatementSuffix=new RewriteRuleSubtreeStream(adaptor,"rule alterTblPartitionStatementSuffix");
        RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
         pushMsg("alter table statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:948:5: ( alterStatementSuffixRename[true] | alterStatementSuffixUpdateStatsCol | alterStatementSuffixDropPartitions[true] | alterStatementSuffixAddPartitions[true] | alterStatementSuffixTouch | alterStatementSuffixArchive | alterStatementSuffixUnArchive | alterStatementSuffixProperties | alterStatementSuffixSkewedby | alterStatementSuffixExchangePartition | alterStatementPartitionKeyType | ( partitionSpec )? alterTblPartitionStatementSuffix -> alterTblPartitionStatementSuffix ( partitionSpec )? )
            int alt54=12;
            switch ( input.LA(1) ) {
            case KW_RENAME:
                {
                switch ( input.LA(2) ) {
                case KW_TO:
                    {
                    switch ( input.LA(3) ) {
                    case Identifier:
                    case KW_ADD:
                    case KW_ADMIN:
                    case KW_AFTER:
                    case KW_ALL:
                    case KW_ALTER:
                    case KW_ANALYZE:
                    case KW_ARCHIVE:
                    case KW_ARRAY:
                    case KW_AS:
                    case KW_ASC:
                    case KW_AUTHORIZATION:
                    case KW_BEFORE:
                    case KW_BETWEEN:
                    case KW_BIGINT:
                    case KW_BINARY:
                    case KW_BOOLEAN:
                    case KW_BOTH:
                    case KW_BUCKET:
                    case KW_BUCKETS:
                    case KW_BY:
                    case KW_CASCADE:
                    case KW_CHANGE:
                    case KW_CLUSTER:
                    case KW_CLUSTERED:
                    case KW_CLUSTERSTATUS:
                    case KW_COLLECTION:
                    case KW_COLUMNS:
                    case KW_COMMENT:
                    case KW_COMPACT:
                    case KW_COMPACTIONS:
                    case KW_COMPUTE:
                    case KW_CONCATENATE:
                    case KW_CONTINUE:
                    case KW_CREATE:
                    case KW_CUBE:
                    case KW_CURSOR:
                    case KW_DATA:
                    case KW_DATABASES:
                    case KW_DATE:
                    case KW_DATETIME:
                    case KW_DBPROPERTIES:
                    case KW_DECIMAL:
                    case KW_DEFAULT:
                    case KW_DEFERRED:
                    case KW_DEFINED:
                    case KW_DELETE:
                    case KW_DELIMITED:
                    case KW_DEPENDENCY:
                    case KW_DESC:
                    case KW_DESCRIBE:
                    case KW_DIRECTORIES:
                    case KW_DIRECTORY:
                    case KW_DISABLE:
                    case KW_DISTRIBUTE:
                    case KW_DOUBLE:
                    case KW_DROP:
                    case KW_ELEM_TYPE:
                    case KW_ENABLE:
                    case KW_ESCAPED:
                    case KW_EXCLUSIVE:
                    case KW_EXISTS:
                    case KW_EXPLAIN:
                    case KW_EXPORT:
                    case KW_EXTERNAL:
                    case KW_FALSE:
                    case KW_FETCH:
                    case KW_FIELDS:
                    case KW_FILE:
                    case KW_FILEFORMAT:
                    case KW_FIRST:
                    case KW_FLOAT:
                    case KW_FOR:
                    case KW_FORMAT:
                    case KW_FORMATTED:
                    case KW_FULL:
                    case KW_FUNCTIONS:
                    case KW_GRANT:
                    case KW_GROUP:
                    case KW_GROUPING:
                    case KW_HOLD_DDLTIME:
                    case KW_IDXPROPERTIES:
                    case KW_IGNORE:
                    case KW_IMPORT:
                    case KW_IN:
                    case KW_INDEX:
                    case KW_INDEXES:
                    case KW_INNER:
                    case KW_INPATH:
                    case KW_INPUTDRIVER:
                    case KW_INPUTFORMAT:
                    case KW_INSERT:
                    case KW_INT:
                    case KW_INTERSECT:
                    case KW_INTO:
                    case KW_IS:
                    case KW_ITEMS:
                    case KW_JAR:
                    case KW_KEYS:
                    case KW_KEY_TYPE:
                    case KW_LATERAL:
                    case KW_LEFT:
                    case KW_LIKE:
                    case KW_LIMIT:
                    case KW_LINES:
                    case KW_LOAD:
                    case KW_LOCAL:
                    case KW_LOCATION:
                    case KW_LOCK:
                    case KW_LOCKS:
                    case KW_LOGICAL:
                    case KW_LONG:
                    case KW_MAPJOIN:
                    case KW_MATERIALIZED:
                    case KW_MINUS:
                    case KW_MSCK:
                    case KW_NONE:
                    case KW_NOSCAN:
                    case KW_NO_DROP:
                    case KW_NULL:
                    case KW_OF:
                    case KW_OFFLINE:
                    case KW_OPTION:
                    case KW_ORDER:
                    case KW_OUT:
                    case KW_OUTER:
                    case KW_OUTPUTDRIVER:
                    case KW_OUTPUTFORMAT:
                    case KW_OVERWRITE:
                    case KW_OWNER:
                    case KW_PARTITIONED:
                    case KW_PARTITIONS:
                    case KW_PERCENT:
                    case KW_PLUS:
                    case KW_PRETTY:
                    case KW_PRINCIPALS:
                    case KW_PROCEDURE:
                    case KW_PROTECTION:
                    case KW_PURGE:
                    case KW_RANGE:
                    case KW_READ:
                    case KW_READONLY:
                    case KW_READS:
                    case KW_REBUILD:
                    case KW_RECORDREADER:
                    case KW_RECORDWRITER:
                    case KW_REGEXP:
                    case KW_RENAME:
                    case KW_REPAIR:
                    case KW_REPLACE:
                    case KW_RESTRICT:
                    case KW_REVOKE:
                    case KW_REWRITE:
                    case KW_RIGHT:
                    case KW_RLIKE:
                    case KW_ROLE:
                    case KW_ROLES:
                    case KW_ROLLUP:
                    case KW_ROW:
                    case KW_ROWS:
                    case KW_SCHEMA:
                    case KW_SCHEMAS:
                    case KW_SEMI:
                    case KW_SERDE:
                    case KW_SERDEPROPERTIES:
                    case KW_SET:
                    case KW_SETS:
                    case KW_SHARED:
                    case KW_SHOW:
                    case KW_SHOW_DATABASE:
                    case KW_SKEWED:
                    case KW_SMALLINT:
                    case KW_SORT:
                    case KW_SORTED:
                    case KW_SSL:
                    case KW_STATISTICS:
                    case KW_STORED:
                    case KW_STREAMTABLE:
                    case KW_STRING:
                    case KW_STRUCT:
                    case KW_TABLE:
                    case KW_TABLES:
                    case KW_TBLPROPERTIES:
                    case KW_TEMPORARY:
                    case KW_TERMINATED:
                    case KW_TIMESTAMP:
                    case KW_TINYINT:
                    case KW_TO:
                    case KW_TOUCH:
                    case KW_TRANSACTIONS:
                    case KW_TRIGGER:
                    case KW_TRUE:
                    case KW_TRUNCATE:
                    case KW_UNARCHIVE:
                    case KW_UNDO:
                    case KW_UNION:
                    case KW_UNIONTYPE:
                    case KW_UNLOCK:
                    case KW_UNSET:
                    case KW_UNSIGNED:
                    case KW_UPDATE:
                    case KW_USE:
                    case KW_USER:
                    case KW_USING:
                    case KW_UTC:
                    case KW_UTCTIMESTAMP:
                    case KW_VALUES:
                    case KW_VALUE_TYPE:
                    case KW_VIEW:
                    case KW_WHILE:
                    case KW_WITH:
                        {
                        alt54=1;
                        }
                        break;
                    case KW_PARTITION:
                        {
                        alt54=1;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 54, 22, input);

                        throw nvae;

                    }

                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 54, 1, input);

                    throw nvae;

                }

                }
                break;
            case KW_UPDATE:
                {
                switch ( input.LA(2) ) {
                case KW_STATISTICS:
                    {
                    switch ( input.LA(3) ) {
                    case KW_FOR:
                        {
                        alt54=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 54, 23, input);

                        throw nvae;

                    }

                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 54, 2, input);

                    throw nvae;

                }

                }
                break;
            case KW_DROP:
                {
                alt54=3;
                }
                break;
            case KW_ADD:
                {
                switch ( input.LA(2) ) {
                case KW_IF:
                case KW_PARTITION:
                    {
                    alt54=4;
                    }
                    break;
                case KW_COLUMNS:
                    {
                    alt54=12;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 54, 4, input);

                    throw nvae;

                }

                }
                break;
            case KW_TOUCH:
                {
                alt54=5;
                }
                break;
            case KW_ARCHIVE:
                {
                alt54=6;
                }
                break;
            case KW_UNARCHIVE:
                {
                alt54=7;
                }
                break;
            case KW_SET:
                {
                switch ( input.LA(2) ) {
                case KW_TBLPROPERTIES:
                    {
                    alt54=8;
                    }
                    break;
                case KW_FILEFORMAT:
                case KW_LOCATION:
                case KW_SERDE:
                case KW_SERDEPROPERTIES:
                case KW_SKEWED:
                    {
                    alt54=12;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 54, 8, input);

                    throw nvae;

                }

                }
                break;
            case KW_UNSET:
                {
                alt54=8;
                }
                break;
            case KW_SKEWED:
                {
                alt54=9;
                }
                break;
            case KW_NOT:
                {
                switch ( input.LA(2) ) {
                case KW_SKEWED:
                case KW_STORED:
                    {
                    alt54=9;
                    }
                    break;
                case KW_CLUSTERED:
                case KW_SORTED:
                    {
                    alt54=12;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 54, 11, input);

                    throw nvae;

                }

                }
                break;
            case KW_EXCHANGE:
                {
                alt54=10;
                }
                break;
            case KW_PARTITION:
                {
                switch ( input.LA(2) ) {
                case KW_COLUMN:
                    {
                    alt54=11;
                    }
                    break;
                case LPAREN:
                    {
                    alt54=12;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 54, 13, input);

                    throw nvae;

                }

                }
                break;
            case KW_CHANGE:
            case KW_CLUSTERED:
            case KW_COMPACT:
            case KW_CONCATENATE:
            case KW_DISABLE:
            case KW_ENABLE:
            case KW_INTO:
            case KW_REPLACE:
                {
                alt54=12;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 54, 0, input);

                throw nvae;

            }

            switch (alt54) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:948:7: alterStatementSuffixRename[true]
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixRename_in_alterTableStatementSuffix3630);
                    alterStatementSuffixRename182=alterStatementSuffixRename(true);

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixRename182.getTree());

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:949:7: alterStatementSuffixUpdateStatsCol
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixUpdateStatsCol_in_alterTableStatementSuffix3639);
                    alterStatementSuffixUpdateStatsCol183=alterStatementSuffixUpdateStatsCol();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixUpdateStatsCol183.getTree());

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:950:7: alterStatementSuffixDropPartitions[true]
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixDropPartitions_in_alterTableStatementSuffix3647);
                    alterStatementSuffixDropPartitions184=alterStatementSuffixDropPartitions(true);

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixDropPartitions184.getTree());

                    }
                    break;
                case 4 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:951:7: alterStatementSuffixAddPartitions[true]
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixAddPartitions_in_alterTableStatementSuffix3656);
                    alterStatementSuffixAddPartitions185=alterStatementSuffixAddPartitions(true);

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixAddPartitions185.getTree());

                    }
                    break;
                case 5 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:952:7: alterStatementSuffixTouch
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixTouch_in_alterTableStatementSuffix3665);
                    alterStatementSuffixTouch186=alterStatementSuffixTouch();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixTouch186.getTree());

                    }
                    break;
                case 6 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:953:7: alterStatementSuffixArchive
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixArchive_in_alterTableStatementSuffix3673);
                    alterStatementSuffixArchive187=alterStatementSuffixArchive();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixArchive187.getTree());

                    }
                    break;
                case 7 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:954:7: alterStatementSuffixUnArchive
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixUnArchive_in_alterTableStatementSuffix3681);
                    alterStatementSuffixUnArchive188=alterStatementSuffixUnArchive();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixUnArchive188.getTree());

                    }
                    break;
                case 8 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:955:7: alterStatementSuffixProperties
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixProperties_in_alterTableStatementSuffix3689);
                    alterStatementSuffixProperties189=alterStatementSuffixProperties();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixProperties189.getTree());

                    }
                    break;
                case 9 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:956:7: alterStatementSuffixSkewedby
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixSkewedby_in_alterTableStatementSuffix3697);
                    alterStatementSuffixSkewedby190=alterStatementSuffixSkewedby();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixSkewedby190.getTree());

                    }
                    break;
                case 10 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:957:7: alterStatementSuffixExchangePartition
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixExchangePartition_in_alterTableStatementSuffix3705);
                    alterStatementSuffixExchangePartition191=alterStatementSuffixExchangePartition();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixExchangePartition191.getTree());

                    }
                    break;
                case 11 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:958:7: alterStatementPartitionKeyType
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementPartitionKeyType_in_alterTableStatementSuffix3713);
                    alterStatementPartitionKeyType192=alterStatementPartitionKeyType();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementPartitionKeyType192.getTree());

                    }
                    break;
                case 12 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:959:7: ( partitionSpec )? alterTblPartitionStatementSuffix
                    {
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:959:7: ( partitionSpec )?
                    int alt53=2;
                    switch ( input.LA(1) ) {
                        case KW_PARTITION:
                            {
                            alt53=1;
                            }
                            break;
                    }

                    switch (alt53) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:959:7: partitionSpec
                            {
                            pushFollow(FOLLOW_partitionSpec_in_alterTableStatementSuffix3721);
                            partitionSpec193=partitionSpec();

                            state._fsp--;

                            stream_partitionSpec.add(partitionSpec193.getTree());

                            }
                            break;

                    }


                    pushFollow(FOLLOW_alterTblPartitionStatementSuffix_in_alterTableStatementSuffix3724);
                    alterTblPartitionStatementSuffix194=alterTblPartitionStatementSuffix();

                    state._fsp--;

                    stream_alterTblPartitionStatementSuffix.add(alterTblPartitionStatementSuffix194.getTree());

                    // AST REWRITE
                    // elements: alterTblPartitionStatementSuffix, partitionSpec
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 959:55: -> alterTblPartitionStatementSuffix ( partitionSpec )?
                    {
                        adaptor.addChild(root_0, stream_alterTblPartitionStatementSuffix.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:959:91: ( partitionSpec )?
                        if ( stream_partitionSpec.hasNext() ) {
                            adaptor.addChild(root_0, stream_partitionSpec.nextTree());

                        }
                        stream_partitionSpec.reset();

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup hook: always executed before leaving this rule
        }
        return retval;
    }
    // $ANTLR end "alterTableStatementSuffix"


    public static class alterTblPartitionStatementSuffix_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterTblPartitionStatementSuffix"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:962:1: alterTblPartitionStatementSuffix : ( alterStatementSuffixFileFormat | alterStatementSuffixLocation | alterStatementSuffixProtectMode | alterStatementSuffixMergeFiles | alterStatementSuffixSerdeProperties | alterStatementSuffixRenamePart | alterStatementSuffixBucketNum | alterTblPartitionStatementSuffixSkewedLocation | alterStatementSuffixClusterbySortby | alterStatementSuffixCompact | alterStatementSuffixUpdateStatsCol | alterStatementSuffixRenameCol | alterStatementSuffixAddCol );
    public final HiveParser.alterTblPartitionStatementSuffix_return alterTblPartitionStatementSuffix() throws RecognitionException {
        HiveParser.alterTblPartitionStatementSuffix_return retval = new HiveParser.alterTblPartitionStatementSuffix_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.alterStatementSuffixFileFormat_return alterStatementSuffixFileFormat195 =null;

        HiveParser.alterStatementSuffixLocation_return alterStatementSuffixLocation196 =null;

        HiveParser.alterStatementSuffixProtectMode_return alterStatementSuffixProtectMode197 =null;

        HiveParser.alterStatementSuffixMergeFiles_return alterStatementSuffixMergeFiles198 =null;

        HiveParser.alterStatementSuffixSerdeProperties_return alterStatementSuffixSerdeProperties199 =null;

        HiveParser.alterStatementSuffixRenamePart_return alterStatementSuffixRenamePart200 =null;

        HiveParser.alterStatementSuffixBucketNum_return alterStatementSuffixBucketNum201 =null;

        HiveParser.alterTblPartitionStatementSuffixSkewedLocation_return alterTblPartitionStatementSuffixSkewedLocation202 =null;

        HiveParser.alterStatementSuffixClusterbySortby_return alterStatementSuffixClusterbySortby203 =null;

        HiveParser.alterStatementSuffixCompact_return alterStatementSuffixCompact204 =null;

        HiveParser.alterStatementSuffixUpdateStatsCol_return alterStatementSuffixUpdateStatsCol205 =null;

        HiveParser.alterStatementSuffixRenameCol_return alterStatementSuffixRenameCol206 =null;

        HiveParser.alterStatementSuffixAddCol_return alterStatementSuffixAddCol207 =null;



        pushMsg("alter table partition statement suffix", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:965:3: ( alterStatementSuffixFileFormat | alterStatementSuffixLocation | alterStatementSuffixProtectMode | alterStatementSuffixMergeFiles | alterStatementSuffixSerdeProperties | alterStatementSuffixRenamePart | alterStatementSuffixBucketNum | alterTblPartitionStatementSuffixSkewedLocation | alterStatementSuffixClusterbySortby | alterStatementSuffixCompact | alterStatementSuffixUpdateStatsCol | alterStatementSuffixRenameCol | alterStatementSuffixAddCol )
            int alt55=13;
            switch ( input.LA(1) ) {
            case KW_SET:
                {
                switch ( input.LA(2) ) {
                case KW_FILEFORMAT:
                    {
                    alt55=1;
                    }
                    break;
                case KW_LOCATION:
                    {
                    alt55=2;
                    }
                    break;
                case KW_SERDE:
                case KW_SERDEPROPERTIES:
                    {
                    alt55=5;
                    }
                    break;
                case KW_SKEWED:
                    {
                    alt55=8;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 55, 1, input);

                    throw nvae;

                }

                }
                break;
            case KW_DISABLE:
            case KW_ENABLE:
                {
                alt55=3;
                }
                break;
            case KW_CONCATENATE:
                {
                alt55=4;
                }
                break;
            case KW_RENAME:
                {
                alt55=6;
                }
                break;
            case KW_INTO:
                {
                alt55=7;
                }
                break;
            case KW_CLUSTERED:
            case KW_NOT:
                {
                alt55=9;
                }
                break;
            case KW_COMPACT:
                {
                alt55=10;
                }
                break;
            case KW_UPDATE:
                {
                alt55=11;
                }
                break;
            case KW_CHANGE:
                {
                alt55=12;
                }
                break;
            case KW_ADD:
            case KW_REPLACE:
                {
                alt55=13;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 55, 0, input);

                throw nvae;

            }

            switch (alt55) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:965:5: alterStatementSuffixFileFormat
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixFileFormat_in_alterTblPartitionStatementSuffix3756);
                    alterStatementSuffixFileFormat195=alterStatementSuffixFileFormat();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixFileFormat195.getTree());

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:966:5: alterStatementSuffixLocation
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixLocation_in_alterTblPartitionStatementSuffix3762);
                    alterStatementSuffixLocation196=alterStatementSuffixLocation();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixLocation196.getTree());

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:967:5: alterStatementSuffixProtectMode
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixProtectMode_in_alterTblPartitionStatementSuffix3768);
                    alterStatementSuffixProtectMode197=alterStatementSuffixProtectMode();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixProtectMode197.getTree());

                    }
                    break;
                case 4 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:968:5: alterStatementSuffixMergeFiles
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixMergeFiles_in_alterTblPartitionStatementSuffix3774);
                    alterStatementSuffixMergeFiles198=alterStatementSuffixMergeFiles();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixMergeFiles198.getTree());

                    }
                    break;
                case 5 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:969:5: alterStatementSuffixSerdeProperties
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixSerdeProperties_in_alterTblPartitionStatementSuffix3780);
                    alterStatementSuffixSerdeProperties199=alterStatementSuffixSerdeProperties();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixSerdeProperties199.getTree());

                    }
                    break;
                case 6 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:970:5: alterStatementSuffixRenamePart
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixRenamePart_in_alterTblPartitionStatementSuffix3786);
                    alterStatementSuffixRenamePart200=alterStatementSuffixRenamePart();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixRenamePart200.getTree());

                    }
                    break;
                case 7 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:971:5: alterStatementSuffixBucketNum
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixBucketNum_in_alterTblPartitionStatementSuffix3792);
                    alterStatementSuffixBucketNum201=alterStatementSuffixBucketNum();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixBucketNum201.getTree());

                    }
                    break;
                case 8 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:972:5: alterTblPartitionStatementSuffixSkewedLocation
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterTblPartitionStatementSuffixSkewedLocation_in_alterTblPartitionStatementSuffix3798);
                    alterTblPartitionStatementSuffixSkewedLocation202=alterTblPartitionStatementSuffixSkewedLocation();

                    state._fsp--;

                    adaptor.addChild(root_0, alterTblPartitionStatementSuffixSkewedLocation202.getTree());

                    }
                    break;
                case 9 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:973:5: alterStatementSuffixClusterbySortby
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixClusterbySortby_in_alterTblPartitionStatementSuffix3804);
                    alterStatementSuffixClusterbySortby203=alterStatementSuffixClusterbySortby();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixClusterbySortby203.getTree());

                    }
                    break;
                case 10 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:974:5: alterStatementSuffixCompact
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixCompact_in_alterTblPartitionStatementSuffix3810);
                    alterStatementSuffixCompact204=alterStatementSuffixCompact();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixCompact204.getTree());

                    }
                    break;
                case 11 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:975:5: alterStatementSuffixUpdateStatsCol
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixUpdateStatsCol_in_alterTblPartitionStatementSuffix3816);
                    alterStatementSuffixUpdateStatsCol205=alterStatementSuffixUpdateStatsCol();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixUpdateStatsCol205.getTree());

                    }
                    break;
                case 12 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:976:5: alterStatementSuffixRenameCol
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixRenameCol_in_alterTblPartitionStatementSuffix3822);
                    alterStatementSuffixRenameCol206=alterStatementSuffixRenameCol();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixRenameCol206.getTree());

                    }
                    break;
                case 13 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:977:5: alterStatementSuffixAddCol
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixAddCol_in_alterTblPartitionStatementSuffix3828);
                    alterStatementSuffixAddCol207=alterStatementSuffixAddCol();

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixAddCol207.getTree());

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup hook: always executed before leaving this rule
        }
        return retval;
    }
    // $ANTLR end "alterTblPartitionStatementSuffix"


    public static class alterStatementPartitionKeyType_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementPartitionKeyType"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:980:1: alterStatementPartitionKeyType : KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN -> ^( TOK_ALTERTABLE_PARTCOLTYPE columnNameType ) ;
    public final HiveParser.alterStatementPartitionKeyType_return alterStatementPartitionKeyType() throws RecognitionException {
        HiveParser.alterStatementPartitionKeyType_return retval = new HiveParser.alterStatementPartitionKeyType_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_PARTITION208=null;
        Token KW_COLUMN209=null;
        Token LPAREN210=null;
        Token RPAREN212=null;
        HiveParser.columnNameType_return columnNameType211 =null;


        CommonTree KW_PARTITION208_tree=null;
        CommonTree KW_COLUMN209_tree=null;
        CommonTree LPAREN210_tree=null;
        CommonTree RPAREN212_tree=null;
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_KW_COLUMN=new RewriteRuleTokenStream(adaptor,"token KW_COLUMN");
        RewriteRuleTokenStream stream_KW_PARTITION=new RewriteRuleTokenStream(adaptor,"token KW_PARTITION");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleSubtreeStream stream_columnNameType=new RewriteRuleSubtreeStream(adaptor,"rule columnNameType");
        msgs.push("alter partition key type"); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:983:2: ( KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN -> ^( TOK_ALTERTABLE_PARTCOLTYPE columnNameType ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:983:4: KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN
            {
            KW_PARTITION208=(Token)match(input,KW_PARTITION,FOLLOW_KW_PARTITION_in_alterStatementPartitionKeyType3850);  
            stream_KW_PARTITION.add(KW_PARTITION208);


            KW_COLUMN209=(Token)match(input,KW_COLUMN,FOLLOW_KW_COLUMN_in_alterStatementPartitionKeyType3852);  
            stream_KW_COLUMN.add(KW_COLUMN209);


            LPAREN210=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_alterStatementPartitionKeyType3854);  
            stream_LPAREN.add(LPAREN210);


            pushFollow(FOLLOW_columnNameType_in_alterStatementPartitionKeyType3856);
            columnNameType211=columnNameType();

            state._fsp--;

            stream_columnNameType.add(columnNameType211.getTree());

            RPAREN212=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_alterStatementPartitionKeyType3858);  
            stream_RPAREN.add(RPAREN212);


            // AST REWRITE
            // elements: columnNameType
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 984:2: -> ^( TOK_ALTERTABLE_PARTCOLTYPE columnNameType )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:984:5: ^( TOK_ALTERTABLE_PARTCOLTYPE columnNameType )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_PARTCOLTYPE, "TOK_ALTERTABLE_PARTCOLTYPE")
                , root_1);

                adaptor.addChild(root_1, stream_columnNameType.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            msgs.pop();
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup hook: always executed before leaving this rule
        }
        return retval;
    }
    // $ANTLR end "alterStatementPartitionKeyType"


    public static class alterViewStatementSuffix_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterViewStatementSuffix"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:987:1: alterViewStatementSuffix : ( alterViewSuffixProperties | alterStatementSuffixRename[false] | alterStatementSuffixAddPartitions[false] | alterStatementSuffixDropPartitions[false] | selectStatementWithCTE );
    public final HiveParser.alterViewStatementSuffix_return alterViewStatementSuffix() throws RecognitionException {
        HiveParser.alterViewStatementSuffix_return retval = new HiveParser.alterViewStatementSuffix_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.alterViewSuffixProperties_return alterViewSuffixProperties213 =null;

        HiveParser.alterStatementSuffixRename_return alterStatementSuffixRename214 =null;

        HiveParser.alterStatementSuffixAddPartitions_return alterStatementSuffixAddPartitions215 =null;

        HiveParser.alterStatementSuffixDropPartitions_return alterStatementSuffixDropPartitions216 =null;

        HiveParser.selectStatementWithCTE_return selectStatementWithCTE217 =null;



         pushMsg("alter view statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:990:5: ( alterViewSuffixProperties | alterStatementSuffixRename[false] | alterStatementSuffixAddPartitions[false] | alterStatementSuffixDropPartitions[false] | selectStatementWithCTE )
            int alt56=5;
            switch ( input.LA(1) ) {
            case KW_SET:
            case KW_UNSET:
                {
                alt56=1;
                }
                break;
            case KW_RENAME:
                {
                alt56=2;
                }
                break;
            case KW_ADD:
                {
                alt56=3;
                }
                break;
            case KW_DROP:
                {
                alt56=4;
                }
                break;
            case KW_MAP:
            case KW_REDUCE:
            case KW_SELECT:
            case KW_WITH:
                {
                alt56=5;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 56, 0, input);

                throw nvae;

            }

            switch (alt56) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:990:7: alterViewSuffixProperties
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterViewSuffixProperties_in_alterViewStatementSuffix3891);
                    alterViewSuffixProperties213=alterViewSuffixProperties();

                    state._fsp--;

                    adaptor.addChild(root_0, alterViewSuffixProperties213.getTree());

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:991:7: alterStatementSuffixRename[false]
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixRename_in_alterViewStatementSuffix3899);
                    alterStatementSuffixRename214=alterStatementSuffixRename(false);

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixRename214.getTree());

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:992:7: alterStatementSuffixAddPartitions[false]
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixAddPartitions_in_alterViewStatementSuffix3908);
                    alterStatementSuffixAddPartitions215=alterStatementSuffixAddPartitions(false);

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixAddPartitions215.getTree());

                    }
                    break;
                case 4 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:993:7: alterStatementSuffixDropPartitions[false]
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterStatementSuffixDropPartitions_in_alterViewStatementSuffix3917);
                    alterStatementSuffixDropPartitions216=alterStatementSuffixDropPartitions(false);

                    state._fsp--;

                    adaptor.addChild(root_0, alterStatementSuffixDropPartitions216.getTree());

                    }
                    break;
                case 5 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:994:7: selectStatementWithCTE
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_selectStatementWithCTE_in_alterViewStatementSuffix3926);
                    selectStatementWithCTE217=selectStatementWithCTE();

                    state._fsp--;

                    adaptor.addChild(root_0, selectStatementWithCTE217.getTree());

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup hook: always executed before leaving this rule
        }
        return retval;
    }
    // $ANTLR end "alterViewStatementSuffix"


    public static class alterIndexStatementSuffix_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterIndexStatementSuffix"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:997:1: alterIndexStatementSuffix : indexName= identifier KW_ON tableName ( partitionSpec )? ( KW_REBUILD -> ^( TOK_ALTERINDEX_REBUILD tableName $indexName ( partitionSpec )? ) | KW_SET KW_IDXPROPERTIES indexProperties -> ^( TOK_ALTERINDEX_PROPERTIES tableName $indexName indexProperties ) ) ;
    public final HiveParser.alterIndexStatementSuffix_return alterIndexStatementSuffix() throws RecognitionException {
        HiveParser.alterIndexStatementSuffix_return retval = new HiveParser.alterIndexStatementSuffix_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_ON218=null;
        Token KW_REBUILD221=null;
        Token KW_SET222=null;
        Token KW_IDXPROPERTIES223=null;
        HiveParser_IdentifiersParser.identifier_return indexName =null;

        HiveParser_FromClauseParser.tableName_return tableName219 =null;

        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec220 =null;

        HiveParser.indexProperties_return indexProperties224 =null;


        CommonTree KW_ON218_tree=null;
        CommonTree KW_REBUILD221_tree=null;
        CommonTree KW_SET222_tree=null;
        CommonTree KW_IDXPROPERTIES223_tree=null;
        RewriteRuleTokenStream stream_KW_REBUILD=new RewriteRuleTokenStream(adaptor,"token KW_REBUILD");
        RewriteRuleTokenStream stream_KW_ON=new RewriteRuleTokenStream(adaptor,"token KW_ON");
        RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
        RewriteRuleTokenStream stream_KW_IDXPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_IDXPROPERTIES");
        RewriteRuleSubtreeStream stream_indexProperties=new RewriteRuleSubtreeStream(adaptor,"rule indexProperties");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
        RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("alter index statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1000:5: (indexName= identifier KW_ON tableName ( partitionSpec )? ( KW_REBUILD -> ^( TOK_ALTERINDEX_REBUILD tableName $indexName ( partitionSpec )? ) | KW_SET KW_IDXPROPERTIES indexProperties -> ^( TOK_ALTERINDEX_PROPERTIES tableName $indexName indexProperties ) ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1000:7: indexName= identifier KW_ON tableName ( partitionSpec )? ( KW_REBUILD -> ^( TOK_ALTERINDEX_REBUILD tableName $indexName ( partitionSpec )? ) | KW_SET KW_IDXPROPERTIES indexProperties -> ^( TOK_ALTERINDEX_PROPERTIES tableName $indexName indexProperties ) )
            {
            pushFollow(FOLLOW_identifier_in_alterIndexStatementSuffix3955);
            indexName=identifier();

            state._fsp--;

            stream_identifier.add(indexName.getTree());

            KW_ON218=(Token)match(input,KW_ON,FOLLOW_KW_ON_in_alterIndexStatementSuffix3957);  
            stream_KW_ON.add(KW_ON218);


            pushFollow(FOLLOW_tableName_in_alterIndexStatementSuffix3959);
            tableName219=tableName();

            state._fsp--;

            stream_tableName.add(tableName219.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1000:44: ( partitionSpec )?
            int alt57=2;
            switch ( input.LA(1) ) {
                case KW_PARTITION:
                    {
                    alt57=1;
                    }
                    break;
            }

            switch (alt57) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1000:44: partitionSpec
                    {
                    pushFollow(FOLLOW_partitionSpec_in_alterIndexStatementSuffix3961);
                    partitionSpec220=partitionSpec();

                    state._fsp--;

                    stream_partitionSpec.add(partitionSpec220.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1001:5: ( KW_REBUILD -> ^( TOK_ALTERINDEX_REBUILD tableName $indexName ( partitionSpec )? ) | KW_SET KW_IDXPROPERTIES indexProperties -> ^( TOK_ALTERINDEX_PROPERTIES tableName $indexName indexProperties ) )
            int alt58=2;
            switch ( input.LA(1) ) {
            case KW_REBUILD:
                {
                alt58=1;
                }
                break;
            case KW_SET:
                {
                alt58=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 58, 0, input);

                throw nvae;

            }

            switch (alt58) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1002:7: KW_REBUILD
                    {
                    KW_REBUILD221=(Token)match(input,KW_REBUILD,FOLLOW_KW_REBUILD_in_alterIndexStatementSuffix3976);  
                    stream_KW_REBUILD.add(KW_REBUILD221);


                    // AST REWRITE
                    // elements: partitionSpec, tableName, indexName
                    // token labels: 
                    // rule labels: retval, indexName
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    RewriteRuleSubtreeStream stream_indexName=new RewriteRuleSubtreeStream(adaptor,"rule indexName",indexName!=null?indexName.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1003:7: -> ^( TOK_ALTERINDEX_REBUILD tableName $indexName ( partitionSpec )? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1003:9: ^( TOK_ALTERINDEX_REBUILD tableName $indexName ( partitionSpec )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERINDEX_REBUILD, "TOK_ALTERINDEX_REBUILD")
                        , root_1);

                        adaptor.addChild(root_1, stream_tableName.nextTree());

                        adaptor.addChild(root_1, stream_indexName.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1003:55: ( partitionSpec )?
                        if ( stream_partitionSpec.hasNext() ) {
                            adaptor.addChild(root_1, stream_partitionSpec.nextTree());

                        }
                        stream_partitionSpec.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1005:7: KW_SET KW_IDXPROPERTIES indexProperties
                    {
                    KW_SET222=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterIndexStatementSuffix4009);  
                    stream_KW_SET.add(KW_SET222);


                    KW_IDXPROPERTIES223=(Token)match(input,KW_IDXPROPERTIES,FOLLOW_KW_IDXPROPERTIES_in_alterIndexStatementSuffix4011);  
                    stream_KW_IDXPROPERTIES.add(KW_IDXPROPERTIES223);


                    pushFollow(FOLLOW_indexProperties_in_alterIndexStatementSuffix4019);
                    indexProperties224=indexProperties();

                    state._fsp--;

                    stream_indexProperties.add(indexProperties224.getTree());

                    // AST REWRITE
                    // elements: indexProperties, indexName, tableName
                    // token labels: 
                    // rule labels: retval, indexName
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    RewriteRuleSubtreeStream stream_indexName=new RewriteRuleSubtreeStream(adaptor,"rule indexName",indexName!=null?indexName.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1007:7: -> ^( TOK_ALTERINDEX_PROPERTIES tableName $indexName indexProperties )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1007:9: ^( TOK_ALTERINDEX_PROPERTIES tableName $indexName indexProperties )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERINDEX_PROPERTIES, "TOK_ALTERINDEX_PROPERTIES")
                        , root_1);

                        adaptor.addChild(root_1, stream_tableName.nextTree());

                        adaptor.addChild(root_1, stream_indexName.nextTree());

                        adaptor.addChild(root_1, stream_indexProperties.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }


            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup hook: always executed before leaving this rule
        }
        return retval;
    }
    // $ANTLR end "alterIndexStatementSuffix"


    public static class alterDatabaseStatementSuffix_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterDatabaseStatementSuffix"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1011:1: alterDatabaseStatementSuffix : ( alterDatabaseSuffixProperties | alterDatabaseSuffixSetOwner );
    public final HiveParser.alterDatabaseStatementSuffix_return alterDatabaseStatementSuffix() throws RecognitionException {
        HiveParser.alterDatabaseStatementSuffix_return retval = new HiveParser.alterDatabaseStatementSuffix_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.alterDatabaseSuffixProperties_return alterDatabaseSuffixProperties225 =null;

        HiveParser.alterDatabaseSuffixSetOwner_return alterDatabaseSuffixSetOwner226 =null;



         pushMsg("alter database statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1014:5: ( alterDatabaseSuffixProperties | alterDatabaseSuffixSetOwner )
            int alt59=2;
            switch ( input.LA(1) ) {
            case Identifier:
                {
                switch ( input.LA(2) ) {
                case KW_SET:
                    {
                    switch ( input.LA(3) ) {
                    case KW_DBPROPERTIES:
                        {
                        alt59=1;
                        }
                        break;
                    case KW_OWNER:
                        {
                        alt59=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 59, 3, input);

                        throw nvae;

                    }

                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 59, 1, input);

                    throw nvae;

                }

                }
                break;
            case KW_ADD:
            case KW_ADMIN:
            case KW_AFTER:
            case KW_ALL:
            case KW_ALTER:
            case KW_ANALYZE:
            case KW_ARCHIVE:
            case KW_ARRAY:
            case KW_AS:
            case KW_ASC:
            case KW_AUTHORIZATION:
            case KW_BEFORE:
            case KW_BETWEEN:
            case KW_BIGINT:
            case KW_BINARY:
            case KW_BOOLEAN:
            case KW_BOTH:
            case KW_BUCKET:
            case KW_BUCKETS:
            case KW_BY:
            case KW_CASCADE:
            case KW_CHANGE:
            case KW_CLUSTER:
            case KW_CLUSTERED:
            case KW_CLUSTERSTATUS:
            case KW_COLLECTION:
            case KW_COLUMNS:
            case KW_COMMENT:
            case KW_COMPACT:
            case KW_COMPACTIONS:
            case KW_COMPUTE:
            case KW_CONCATENATE:
            case KW_CONTINUE:
            case KW_CREATE:
            case KW_CUBE:
            case KW_CURSOR:
            case KW_DATA:
            case KW_DATABASES:
            case KW_DATE:
            case KW_DATETIME:
            case KW_DBPROPERTIES:
            case KW_DECIMAL:
            case KW_DEFAULT:
            case KW_DEFERRED:
            case KW_DEFINED:
            case KW_DELETE:
            case KW_DELIMITED:
            case KW_DEPENDENCY:
            case KW_DESC:
            case KW_DESCRIBE:
            case KW_DIRECTORIES:
            case KW_DIRECTORY:
            case KW_DISABLE:
            case KW_DISTRIBUTE:
            case KW_DOUBLE:
            case KW_DROP:
            case KW_ELEM_TYPE:
            case KW_ENABLE:
            case KW_ESCAPED:
            case KW_EXCLUSIVE:
            case KW_EXISTS:
            case KW_EXPLAIN:
            case KW_EXPORT:
            case KW_EXTERNAL:
            case KW_FALSE:
            case KW_FETCH:
            case KW_FIELDS:
            case KW_FILE:
            case KW_FILEFORMAT:
            case KW_FIRST:
            case KW_FLOAT:
            case KW_FOR:
            case KW_FORMAT:
            case KW_FORMATTED:
            case KW_FULL:
            case KW_FUNCTIONS:
            case KW_GRANT:
            case KW_GROUP:
            case KW_GROUPING:
            case KW_HOLD_DDLTIME:
            case KW_IDXPROPERTIES:
            case KW_IGNORE:
            case KW_IMPORT:
            case KW_IN:
            case KW_INDEX:
            case KW_INDEXES:
            case KW_INNER:
            case KW_INPATH:
            case KW_INPUTDRIVER:
            case KW_INPUTFORMAT:
            case KW_INSERT:
            case KW_INT:
            case KW_INTERSECT:
            case KW_INTO:
            case KW_IS:
            case KW_ITEMS:
            case KW_JAR:
            case KW_KEYS:
            case KW_KEY_TYPE:
            case KW_LATERAL:
            case KW_LEFT:
            case KW_LIKE:
            case KW_LIMIT:
            case KW_LINES:
            case KW_LOAD:
            case KW_LOCAL:
            case KW_LOCATION:
            case KW_LOCK:
            case KW_LOCKS:
            case KW_LOGICAL:
            case KW_LONG:
            case KW_MAPJOIN:
            case KW_MATERIALIZED:
            case KW_MINUS:
            case KW_MSCK:
            case KW_NONE:
            case KW_NOSCAN:
            case KW_NO_DROP:
            case KW_NULL:
            case KW_OF:
            case KW_OFFLINE:
            case KW_OPTION:
            case KW_ORDER:
            case KW_OUT:
            case KW_OUTER:
            case KW_OUTPUTDRIVER:
            case KW_OUTPUTFORMAT:
            case KW_OVERWRITE:
            case KW_OWNER:
            case KW_PARTITION:
            case KW_PARTITIONED:
            case KW_PARTITIONS:
            case KW_PERCENT:
            case KW_PLUS:
            case KW_PRETTY:
            case KW_PRINCIPALS:
            case KW_PROCEDURE:
            case KW_PROTECTION:
            case KW_PURGE:
            case KW_RANGE:
            case KW_READ:
            case KW_READONLY:
            case KW_READS:
            case KW_REBUILD:
            case KW_RECORDREADER:
            case KW_RECORDWRITER:
            case KW_REGEXP:
            case KW_RENAME:
            case KW_REPAIR:
            case KW_REPLACE:
            case KW_RESTRICT:
            case KW_REVOKE:
            case KW_REWRITE:
            case KW_RIGHT:
            case KW_RLIKE:
            case KW_ROLE:
            case KW_ROLES:
            case KW_ROLLUP:
            case KW_ROW:
            case KW_ROWS:
            case KW_SCHEMA:
            case KW_SCHEMAS:
            case KW_SEMI:
            case KW_SERDE:
            case KW_SERDEPROPERTIES:
            case KW_SET:
            case KW_SETS:
            case KW_SHARED:
            case KW_SHOW:
            case KW_SHOW_DATABASE:
            case KW_SKEWED:
            case KW_SMALLINT:
            case KW_SORT:
            case KW_SORTED:
            case KW_SSL:
            case KW_STATISTICS:
            case KW_STORED:
            case KW_STREAMTABLE:
            case KW_STRING:
            case KW_STRUCT:
            case KW_TABLE:
            case KW_TABLES:
            case KW_TBLPROPERTIES:
            case KW_TEMPORARY:
            case KW_TERMINATED:
            case KW_TIMESTAMP:
            case KW_TINYINT:
            case KW_TO:
            case KW_TOUCH:
            case KW_TRANSACTIONS:
            case KW_TRIGGER:
            case KW_TRUE:
            case KW_TRUNCATE:
            case KW_UNARCHIVE:
            case KW_UNDO:
            case KW_UNION:
            case KW_UNIONTYPE:
            case KW_UNLOCK:
            case KW_UNSET:
            case KW_UNSIGNED:
            case KW_UPDATE:
            case KW_USE:
            case KW_USER:
            case KW_USING:
            case KW_UTC:
            case KW_UTCTIMESTAMP:
            case KW_VALUES:
            case KW_VALUE_TYPE:
            case KW_VIEW:
            case KW_WHILE:
            case KW_WITH:
                {
                switch ( input.LA(2) ) {
                case KW_SET:
                    {
                    switch ( input.LA(3) ) {
                    case KW_DBPROPERTIES:
                        {
                        alt59=1;
                        }
                        break;
                    case KW_OWNER:
                        {
                        alt59=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 59, 4, input);

                        throw nvae;

                    }

                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 59, 2, input);

                    throw nvae;

                }

                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 59, 0, input);

                throw nvae;

            }

            switch (alt59) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1014:7: alterDatabaseSuffixProperties
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterDatabaseSuffixProperties_in_alterDatabaseStatementSuffix4070);
                    alterDatabaseSuffixProperties225=alterDatabaseSuffixProperties();

                    state._fsp--;

                    adaptor.addChild(root_0, alterDatabaseSuffixProperties225.getTree());

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1015:7: alterDatabaseSuffixSetOwner
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_alterDatabaseSuffixSetOwner_in_alterDatabaseStatementSuffix4078);
                    alterDatabaseSuffixSetOwner226=alterDatabaseSuffixSetOwner();

                    state._fsp--;

                    adaptor.addChild(root_0, alterDatabaseSuffixSetOwner226.getTree());

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterDatabaseStatementSuffix"


    public static class alterDatabaseSuffixProperties_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterDatabaseSuffixProperties"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1018:1: alterDatabaseSuffixProperties : name= identifier KW_SET KW_DBPROPERTIES dbProperties -> ^( TOK_ALTERDATABASE_PROPERTIES $name dbProperties ) ;
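    // Editor's note (illustrative, not generated): a statement such as
    //   ALTER DATABASE db1 SET DBPROPERTIES ('creator'='alice')
    // reaches this rule, which captures the database name as $name and rewrites the clause to
    //   ^(TOK_ALTERDATABASE_PROPERTIES $name dbProperties)
    // The property key/value above is only an example.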
    public final HiveParser.alterDatabaseSuffixProperties_return alterDatabaseSuffixProperties() throws RecognitionException {
        HiveParser.alterDatabaseSuffixProperties_return retval = new HiveParser.alterDatabaseSuffixProperties_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_SET227=null;
        Token KW_DBPROPERTIES228=null;
        HiveParser_IdentifiersParser.identifier_return name =null;

        HiveParser.dbProperties_return dbProperties229 =null;


        CommonTree KW_SET227_tree=null;
        CommonTree KW_DBPROPERTIES228_tree=null;
        RewriteRuleTokenStream stream_KW_DBPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_DBPROPERTIES");
        RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
        RewriteRuleSubtreeStream stream_dbProperties=new RewriteRuleSubtreeStream(adaptor,"rule dbProperties");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("alter database properties statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1021:5: (name= identifier KW_SET KW_DBPROPERTIES dbProperties -> ^( TOK_ALTERDATABASE_PROPERTIES $name dbProperties ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1021:7: name= identifier KW_SET KW_DBPROPERTIES dbProperties
            {
            pushFollow(FOLLOW_identifier_in_alterDatabaseSuffixProperties4107);
            name=identifier();

            state._fsp--;

            stream_identifier.add(name.getTree());

            KW_SET227=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterDatabaseSuffixProperties4109);  
            stream_KW_SET.add(KW_SET227);


            KW_DBPROPERTIES228=(Token)match(input,KW_DBPROPERTIES,FOLLOW_KW_DBPROPERTIES_in_alterDatabaseSuffixProperties4111);  
            stream_KW_DBPROPERTIES.add(KW_DBPROPERTIES228);


            pushFollow(FOLLOW_dbProperties_in_alterDatabaseSuffixProperties4113);
            dbProperties229=dbProperties();

            state._fsp--;

            stream_dbProperties.add(dbProperties229.getTree());

            // AST REWRITE
            // elements: dbProperties, name
            // token labels: 
            // rule labels: retval, name
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_name=new RewriteRuleSubtreeStream(adaptor,"rule name",name!=null?name.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1022:5: -> ^( TOK_ALTERDATABASE_PROPERTIES $name dbProperties )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1022:8: ^( TOK_ALTERDATABASE_PROPERTIES $name dbProperties )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERDATABASE_PROPERTIES, "TOK_ALTERDATABASE_PROPERTIES")
                , root_1);

                adaptor.addChild(root_1, stream_name.nextTree());

                adaptor.addChild(root_1, stream_dbProperties.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterDatabaseSuffixProperties"


    public static class alterDatabaseSuffixSetOwner_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterDatabaseSuffixSetOwner"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1025:1: alterDatabaseSuffixSetOwner : dbName= identifier KW_SET KW_OWNER principalName -> ^( TOK_ALTERDATABASE_OWNER $dbName principalName ) ;
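    // Editor's note (illustrative, not generated): a statement such as
    //   ALTER DATABASE db1 SET OWNER USER alice
    // reaches this rule and is rewritten to ^(TOK_ALTERDATABASE_OWNER $dbName principalName);
    // principalName is defined elsewhere in the grammar (the USER form above is an assumed
    // example of one of its alternatives).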
    public final HiveParser.alterDatabaseSuffixSetOwner_return alterDatabaseSuffixSetOwner() throws RecognitionException {
        HiveParser.alterDatabaseSuffixSetOwner_return retval = new HiveParser.alterDatabaseSuffixSetOwner_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_SET230=null;
        Token KW_OWNER231=null;
        HiveParser_IdentifiersParser.identifier_return dbName =null;

        HiveParser.principalName_return principalName232 =null;


        CommonTree KW_SET230_tree=null;
        CommonTree KW_OWNER231_tree=null;
        RewriteRuleTokenStream stream_KW_OWNER=new RewriteRuleTokenStream(adaptor,"token KW_OWNER");
        RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
        RewriteRuleSubtreeStream stream_principalName=new RewriteRuleSubtreeStream(adaptor,"rule principalName");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("alter database set owner", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1028:5: (dbName= identifier KW_SET KW_OWNER principalName -> ^( TOK_ALTERDATABASE_OWNER $dbName principalName ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1028:7: dbName= identifier KW_SET KW_OWNER principalName
            {
            pushFollow(FOLLOW_identifier_in_alterDatabaseSuffixSetOwner4157);
            dbName=identifier();

            state._fsp--;

            stream_identifier.add(dbName.getTree());

            KW_SET230=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterDatabaseSuffixSetOwner4159);  
            stream_KW_SET.add(KW_SET230);


            KW_OWNER231=(Token)match(input,KW_OWNER,FOLLOW_KW_OWNER_in_alterDatabaseSuffixSetOwner4161);  
            stream_KW_OWNER.add(KW_OWNER231);


            pushFollow(FOLLOW_principalName_in_alterDatabaseSuffixSetOwner4163);
            principalName232=principalName();

            state._fsp--;

            stream_principalName.add(principalName232.getTree());

            // AST REWRITE
            // elements: dbName, principalName
            // token labels: 
            // rule labels: retval, dbName
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_dbName=new RewriteRuleSubtreeStream(adaptor,"rule dbName",dbName!=null?dbName.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1029:5: -> ^( TOK_ALTERDATABASE_OWNER $dbName principalName )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1029:8: ^( TOK_ALTERDATABASE_OWNER $dbName principalName )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERDATABASE_OWNER, "TOK_ALTERDATABASE_OWNER")
                , root_1);

                adaptor.addChild(root_1, stream_dbName.nextTree());

                adaptor.addChild(root_1, stream_principalName.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterDatabaseSuffixSetOwner"


    public static class alterStatementSuffixRename_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixRename"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1032:1: alterStatementSuffixRename[boolean table] : KW_RENAME KW_TO tableName -> { table }? ^( TOK_ALTERTABLE_RENAME tableName ) -> ^( TOK_ALTERVIEW_RENAME tableName ) ;
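    // Editor's note (illustrative, not generated): this suffix matches "RENAME TO new_name" for
    // both ALTER TABLE and ALTER VIEW. The boolean `table` argument drives the conditional
    // rewrite below: ^(TOK_ALTERTABLE_RENAME tableName) when true,
    // ^(TOK_ALTERVIEW_RENAME tableName) otherwise.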
    public final HiveParser.alterStatementSuffixRename_return alterStatementSuffixRename(boolean table) throws RecognitionException {
        HiveParser.alterStatementSuffixRename_return retval = new HiveParser.alterStatementSuffixRename_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_RENAME233=null;
        Token KW_TO234=null;
        HiveParser_FromClauseParser.tableName_return tableName235 =null;


        CommonTree KW_RENAME233_tree=null;
        CommonTree KW_TO234_tree=null;
        RewriteRuleTokenStream stream_KW_RENAME=new RewriteRuleTokenStream(adaptor,"token KW_RENAME");
        RewriteRuleTokenStream stream_KW_TO=new RewriteRuleTokenStream(adaptor,"token KW_TO");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
         pushMsg("rename statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1035:5: ( KW_RENAME KW_TO tableName -> { table }? ^( TOK_ALTERTABLE_RENAME tableName ) -> ^( TOK_ALTERVIEW_RENAME tableName ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1035:7: KW_RENAME KW_TO tableName
            {
            KW_RENAME233=(Token)match(input,KW_RENAME,FOLLOW_KW_RENAME_in_alterStatementSuffixRename4206);  
            stream_KW_RENAME.add(KW_RENAME233);


            KW_TO234=(Token)match(input,KW_TO,FOLLOW_KW_TO_in_alterStatementSuffixRename4208);  
            stream_KW_TO.add(KW_TO234);


            pushFollow(FOLLOW_tableName_in_alterStatementSuffixRename4210);
            tableName235=tableName();

            state._fsp--;

            stream_tableName.add(tableName235.getTree());

            // AST REWRITE
            // elements: tableName, tableName
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1036:5: -> { table }? ^( TOK_ALTERTABLE_RENAME tableName )
            if ( table ) {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1036:19: ^( TOK_ALTERTABLE_RENAME tableName )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_RENAME, "TOK_ALTERTABLE_RENAME")
                , root_1);

                adaptor.addChild(root_1, stream_tableName.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }

            else // 1037:5: -> ^( TOK_ALTERVIEW_RENAME tableName )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1037:19: ^( TOK_ALTERVIEW_RENAME tableName )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERVIEW_RENAME, "TOK_ALTERVIEW_RENAME")
                , root_1);

                adaptor.addChild(root_1, stream_tableName.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixRename"


    public static class alterStatementSuffixAddCol_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixAddCol"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1040:1: alterStatementSuffixAddCol : (add= KW_ADD |replace= KW_REPLACE ) KW_COLUMNS LPAREN columnNameTypeList RPAREN -> {$add != null}? ^( TOK_ALTERTABLE_ADDCOLS columnNameTypeList ) -> ^( TOK_ALTERTABLE_REPLACECOLS columnNameTypeList ) ;
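    // Editor's note (illustrative, not generated): matches the column-list forms
    //   ... ADD COLUMNS (c1 INT, c2 STRING)
    //   ... REPLACE COLUMNS (c1 INT, c2 STRING)
    // The $add label, set only in the first alternative, drives the rewrite below:
    // TOK_ALTERTABLE_ADDCOLS when ADD was seen, TOK_ALTERTABLE_REPLACECOLS otherwise.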
    public final HiveParser.alterStatementSuffixAddCol_return alterStatementSuffixAddCol() throws RecognitionException {
        HiveParser.alterStatementSuffixAddCol_return retval = new HiveParser.alterStatementSuffixAddCol_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token add=null;
        Token replace=null;
        Token KW_COLUMNS236=null;
        Token LPAREN237=null;
        Token RPAREN239=null;
        HiveParser.columnNameTypeList_return columnNameTypeList238 =null;


        CommonTree add_tree=null;
        CommonTree replace_tree=null;
        CommonTree KW_COLUMNS236_tree=null;
        CommonTree LPAREN237_tree=null;
        CommonTree RPAREN239_tree=null;
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_KW_REPLACE=new RewriteRuleTokenStream(adaptor,"token KW_REPLACE");
        RewriteRuleTokenStream stream_KW_COLUMNS=new RewriteRuleTokenStream(adaptor,"token KW_COLUMNS");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleTokenStream stream_KW_ADD=new RewriteRuleTokenStream(adaptor,"token KW_ADD");
        RewriteRuleSubtreeStream stream_columnNameTypeList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameTypeList");
         pushMsg("add column statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1043:5: ( (add= KW_ADD |replace= KW_REPLACE ) KW_COLUMNS LPAREN columnNameTypeList RPAREN -> {$add != null}? ^( TOK_ALTERTABLE_ADDCOLS columnNameTypeList ) -> ^( TOK_ALTERTABLE_REPLACECOLS columnNameTypeList ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1043:7: (add= KW_ADD |replace= KW_REPLACE ) KW_COLUMNS LPAREN columnNameTypeList RPAREN
            {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1043:7: (add= KW_ADD |replace= KW_REPLACE )
            int alt60=2;
            switch ( input.LA(1) ) {
            case KW_ADD:
                {
                alt60=1;
                }
                break;
            case KW_REPLACE:
                {
                alt60=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 60, 0, input);

                throw nvae;

            }

            switch (alt60) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1043:8: add= KW_ADD
                    {
                    add=(Token)match(input,KW_ADD,FOLLOW_KW_ADD_in_alterStatementSuffixAddCol4277);  
                    stream_KW_ADD.add(add);


                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1043:21: replace= KW_REPLACE
                    {
                    replace=(Token)match(input,KW_REPLACE,FOLLOW_KW_REPLACE_in_alterStatementSuffixAddCol4283);  
                    stream_KW_REPLACE.add(replace);


                    }
                    break;

            }


            KW_COLUMNS236=(Token)match(input,KW_COLUMNS,FOLLOW_KW_COLUMNS_in_alterStatementSuffixAddCol4286);  
            stream_KW_COLUMNS.add(KW_COLUMNS236);


            LPAREN237=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_alterStatementSuffixAddCol4288);  
            stream_LPAREN.add(LPAREN237);


            pushFollow(FOLLOW_columnNameTypeList_in_alterStatementSuffixAddCol4290);
            columnNameTypeList238=columnNameTypeList();

            state._fsp--;

            stream_columnNameTypeList.add(columnNameTypeList238.getTree());

            RPAREN239=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_alterStatementSuffixAddCol4292);  
            stream_RPAREN.add(RPAREN239);


            // AST REWRITE
            // elements: columnNameTypeList, columnNameTypeList
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1044:5: -> {$add != null}? ^( TOK_ALTERTABLE_ADDCOLS columnNameTypeList )
            if (add != null) {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1044:24: ^( TOK_ALTERTABLE_ADDCOLS columnNameTypeList )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_ADDCOLS, "TOK_ALTERTABLE_ADDCOLS")
                , root_1);

                adaptor.addChild(root_1, stream_columnNameTypeList.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }

            else // 1045:5: -> ^( TOK_ALTERTABLE_REPLACECOLS columnNameTypeList )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1045:24: ^( TOK_ALTERTABLE_REPLACECOLS columnNameTypeList )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_REPLACECOLS, "TOK_ALTERTABLE_REPLACECOLS")
                , root_1);

                adaptor.addChild(root_1, stream_columnNameTypeList.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixAddCol"


    public static class alterStatementSuffixRenameCol_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixRenameCol"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1048:1: alterStatementSuffixRenameCol : KW_CHANGE ( KW_COLUMN )? oldName= identifier newName= identifier colType ( KW_COMMENT comment= StringLiteral )? ( alterStatementChangeColPosition )? -> ^( TOK_ALTERTABLE_RENAMECOL $oldName $newName colType ( $comment)? ( alterStatementChangeColPosition )? ) ;
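    // Editor's note (illustrative, not generated): matches CHANGE [COLUMN] clauses such as
    //   ... CHANGE COLUMN old_c new_c INT COMMENT 'renamed' AFTER other_c
    // The optional COMMENT literal and the optional FIRST/AFTER position clause are added to
    // the TOK_ALTERTABLE_RENAMECOL rewrite only when they were present in the input.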
    public final HiveParser.alterStatementSuffixRenameCol_return alterStatementSuffixRenameCol() throws RecognitionException {
        HiveParser.alterStatementSuffixRenameCol_return retval = new HiveParser.alterStatementSuffixRenameCol_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token comment=null;
        Token KW_CHANGE240=null;
        Token KW_COLUMN241=null;
        Token KW_COMMENT243=null;
        HiveParser_IdentifiersParser.identifier_return oldName =null;

        HiveParser_IdentifiersParser.identifier_return newName =null;

        HiveParser.colType_return colType242 =null;

        HiveParser.alterStatementChangeColPosition_return alterStatementChangeColPosition244 =null;


        CommonTree comment_tree=null;
        CommonTree KW_CHANGE240_tree=null;
        CommonTree KW_COLUMN241_tree=null;
        CommonTree KW_COMMENT243_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");
        RewriteRuleTokenStream stream_KW_COLUMN=new RewriteRuleTokenStream(adaptor,"token KW_COLUMN");
        RewriteRuleTokenStream stream_KW_CHANGE=new RewriteRuleTokenStream(adaptor,"token KW_CHANGE");
        RewriteRuleSubtreeStream stream_alterStatementChangeColPosition=new RewriteRuleSubtreeStream(adaptor,"rule alterStatementChangeColPosition");
        RewriteRuleSubtreeStream stream_colType=new RewriteRuleSubtreeStream(adaptor,"rule colType");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("rename column name", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1051:5: ( KW_CHANGE ( KW_COLUMN )? oldName= identifier newName= identifier colType ( KW_COMMENT comment= StringLiteral )? ( alterStatementChangeColPosition )? -> ^( TOK_ALTERTABLE_RENAMECOL $oldName $newName colType ( $comment)? ( alterStatementChangeColPosition )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1051:7: KW_CHANGE ( KW_COLUMN )? oldName= identifier newName= identifier colType ( KW_COMMENT comment= StringLiteral )? ( alterStatementChangeColPosition )?
            {
            KW_CHANGE240=(Token)match(input,KW_CHANGE,FOLLOW_KW_CHANGE_in_alterStatementSuffixRenameCol4361);  
            stream_KW_CHANGE.add(KW_CHANGE240);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1051:17: ( KW_COLUMN )?
            int alt61=2;
            switch ( input.LA(1) ) {
                case KW_COLUMN:
                    {
                    alt61=1;
                    }
                    break;
            }

            switch (alt61) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1051:17: KW_COLUMN
                    {
                    KW_COLUMN241=(Token)match(input,KW_COLUMN,FOLLOW_KW_COLUMN_in_alterStatementSuffixRenameCol4363);  
                    stream_KW_COLUMN.add(KW_COLUMN241);


                    }
                    break;

            }


            pushFollow(FOLLOW_identifier_in_alterStatementSuffixRenameCol4368);
            oldName=identifier();

            state._fsp--;

            stream_identifier.add(oldName.getTree());

            pushFollow(FOLLOW_identifier_in_alterStatementSuffixRenameCol4372);
            newName=identifier();

            state._fsp--;

            stream_identifier.add(newName.getTree());

            pushFollow(FOLLOW_colType_in_alterStatementSuffixRenameCol4374);
            colType242=colType();

            state._fsp--;

            stream_colType.add(colType242.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1051:74: ( KW_COMMENT comment= StringLiteral )?
            int alt62=2;
            switch ( input.LA(1) ) {
                case KW_COMMENT:
                    {
                    alt62=1;
                    }
                    break;
            }

            switch (alt62) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1051:75: KW_COMMENT comment= StringLiteral
                    {
                    KW_COMMENT243=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_alterStatementSuffixRenameCol4377);  
                    stream_KW_COMMENT.add(KW_COMMENT243);


                    comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_alterStatementSuffixRenameCol4381);  
                    stream_StringLiteral.add(comment);


                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1051:110: ( alterStatementChangeColPosition )?
            int alt63=2;
            switch ( input.LA(1) ) {
                case KW_AFTER:
                case KW_FIRST:
                    {
                    alt63=1;
                    }
                    break;
            }

            switch (alt63) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1051:110: alterStatementChangeColPosition
                    {
                    pushFollow(FOLLOW_alterStatementChangeColPosition_in_alterStatementSuffixRenameCol4385);
                    alterStatementChangeColPosition244=alterStatementChangeColPosition();

                    state._fsp--;

                    stream_alterStatementChangeColPosition.add(alterStatementChangeColPosition244.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: comment, alterStatementChangeColPosition, newName, colType, oldName
            // token labels: comment
            // rule labels: retval, newName, oldName
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_newName=new RewriteRuleSubtreeStream(adaptor,"rule newName",newName!=null?newName.tree:null);
            RewriteRuleSubtreeStream stream_oldName=new RewriteRuleSubtreeStream(adaptor,"rule oldName",oldName!=null?oldName.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1052:5: -> ^( TOK_ALTERTABLE_RENAMECOL $oldName $newName colType ( $comment)? ( alterStatementChangeColPosition )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1052:7: ^( TOK_ALTERTABLE_RENAMECOL $oldName $newName colType ( $comment)? ( alterStatementChangeColPosition )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_RENAMECOL, "TOK_ALTERTABLE_RENAMECOL")
                , root_1);

                adaptor.addChild(root_1, stream_oldName.nextTree());

                adaptor.addChild(root_1, stream_newName.nextTree());

                adaptor.addChild(root_1, stream_colType.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1052:61: ( $comment)?
                if ( stream_comment.hasNext() ) {
                    adaptor.addChild(root_1, stream_comment.nextNode());

                }
                stream_comment.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1052:70: ( alterStatementChangeColPosition )?
                if ( stream_alterStatementChangeColPosition.hasNext() ) {
                    adaptor.addChild(root_1, stream_alterStatementChangeColPosition.nextTree());

                }
                stream_alterStatementChangeColPosition.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixRenameCol"


    public static class alterStatementSuffixUpdateStatsCol_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixUpdateStatsCol"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1055:1: alterStatementSuffixUpdateStatsCol : KW_UPDATE KW_STATISTICS KW_FOR ( KW_COLUMN )? colName= identifier KW_SET tableProperties ( KW_COMMENT comment= StringLiteral )? -> ^( TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties ( $comment)? ) ;
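    // Editor's note (illustrative, not generated): matches clauses of roughly the form
    //   ... UPDATE STATISTICS FOR COLUMN c1 SET ('numDVs'='100') COMMENT 'set manually'
    // (the property key is only an assumed example) and rewrites them to
    // ^(TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties $comment?).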
    public final HiveParser.alterStatementSuffixUpdateStatsCol_return alterStatementSuffixUpdateStatsCol() throws RecognitionException {
        HiveParser.alterStatementSuffixUpdateStatsCol_return retval = new HiveParser.alterStatementSuffixUpdateStatsCol_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token comment=null;
        Token KW_UPDATE245=null;
        Token KW_STATISTICS246=null;
        Token KW_FOR247=null;
        Token KW_COLUMN248=null;
        Token KW_SET249=null;
        Token KW_COMMENT251=null;
        HiveParser_IdentifiersParser.identifier_return colName =null;

        HiveParser.tableProperties_return tableProperties250 =null;


        CommonTree comment_tree=null;
        CommonTree KW_UPDATE245_tree=null;
        CommonTree KW_STATISTICS246_tree=null;
        CommonTree KW_FOR247_tree=null;
        CommonTree KW_COLUMN248_tree=null;
        CommonTree KW_SET249_tree=null;
        CommonTree KW_COMMENT251_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");
        RewriteRuleTokenStream stream_KW_UPDATE=new RewriteRuleTokenStream(adaptor,"token KW_UPDATE");
        RewriteRuleTokenStream stream_KW_COLUMN=new RewriteRuleTokenStream(adaptor,"token KW_COLUMN");
        RewriteRuleTokenStream stream_KW_STATISTICS=new RewriteRuleTokenStream(adaptor,"token KW_STATISTICS");
        RewriteRuleTokenStream stream_KW_FOR=new RewriteRuleTokenStream(adaptor,"token KW_FOR");
        RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
        RewriteRuleSubtreeStream stream_tableProperties=new RewriteRuleSubtreeStream(adaptor,"rule tableProperties");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("update column statistics", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1058:5: ( KW_UPDATE KW_STATISTICS KW_FOR ( KW_COLUMN )? colName= identifier KW_SET tableProperties ( KW_COMMENT comment= StringLiteral )? -> ^( TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties ( $comment)? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1058:7: KW_UPDATE KW_STATISTICS KW_FOR ( KW_COLUMN )? colName= identifier KW_SET tableProperties ( KW_COMMENT comment= StringLiteral )?
            {
            KW_UPDATE245=(Token)match(input,KW_UPDATE,FOLLOW_KW_UPDATE_in_alterStatementSuffixUpdateStatsCol4437);  
            stream_KW_UPDATE.add(KW_UPDATE245);


            KW_STATISTICS246=(Token)match(input,KW_STATISTICS,FOLLOW_KW_STATISTICS_in_alterStatementSuffixUpdateStatsCol4439);  
            stream_KW_STATISTICS.add(KW_STATISTICS246);


            KW_FOR247=(Token)match(input,KW_FOR,FOLLOW_KW_FOR_in_alterStatementSuffixUpdateStatsCol4441);  
            stream_KW_FOR.add(KW_FOR247);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1058:38: ( KW_COLUMN )?
            int alt64=2;
            switch ( input.LA(1) ) {
                case KW_COLUMN:
                    {
                    alt64=1;
                    }
                    break;
            }

            switch (alt64) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1058:38: KW_COLUMN
                    {
                    KW_COLUMN248=(Token)match(input,KW_COLUMN,FOLLOW_KW_COLUMN_in_alterStatementSuffixUpdateStatsCol4443);  
                    stream_KW_COLUMN.add(KW_COLUMN248);


                    }
                    break;

            }


            pushFollow(FOLLOW_identifier_in_alterStatementSuffixUpdateStatsCol4448);
            colName=identifier();

            state._fsp--;

            stream_identifier.add(colName.getTree());

            KW_SET249=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterStatementSuffixUpdateStatsCol4450);  
            stream_KW_SET.add(KW_SET249);


            pushFollow(FOLLOW_tableProperties_in_alterStatementSuffixUpdateStatsCol4452);
            tableProperties250=tableProperties();

            state._fsp--;

            stream_tableProperties.add(tableProperties250.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1058:91: ( KW_COMMENT comment= StringLiteral )?
            int alt65=2;
            switch ( input.LA(1) ) {
                case KW_COMMENT:
                    {
                    alt65=1;
                    }
                    break;
            }

            switch (alt65) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1058:92: KW_COMMENT comment= StringLiteral
                    {
                    KW_COMMENT251=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_alterStatementSuffixUpdateStatsCol4455);  
                    stream_KW_COMMENT.add(KW_COMMENT251);


                    comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_alterStatementSuffixUpdateStatsCol4459);  
                    stream_StringLiteral.add(comment);


                    }
                    break;

            }


            // AST REWRITE
            // elements: comment, colName, tableProperties
            // token labels: comment
            // rule labels: retval, colName
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_colName=new RewriteRuleSubtreeStream(adaptor,"rule colName",colName!=null?colName.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1059:5: -> ^( TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties ( $comment)? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1059:7: ^( TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties ( $comment)? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_UPDATECOLSTATS, "TOK_ALTERTABLE_UPDATECOLSTATS")
                , root_1);

                adaptor.addChild(root_1, stream_colName.nextTree());

                adaptor.addChild(root_1, stream_tableProperties.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1059:65: ( $comment)?
                if ( stream_comment.hasNext() ) {
                    adaptor.addChild(root_1, stream_comment.nextNode());

                }
                stream_comment.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixUpdateStatsCol"


    public static class alterStatementChangeColPosition_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementChangeColPosition"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1062:1: alterStatementChangeColPosition : (first= KW_FIRST | KW_AFTER afterCol= identifier -> {$first != null}? ^( TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION ) -> ^( TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION $afterCol) );
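    // Editor's note (illustrative, not generated): matches the position clause of a CHANGE
    // COLUMN statement, either FIRST or "AFTER other_column". Note that, as generated, the
    // {$first != null}? check is only evaluated inside the AFTER alternative, where `first`
    // is never assigned, so that alternative always yields
    // ^(TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION $afterCol); the bare FIRST alternative keeps
    // the KW_FIRST token itself as its tree.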
    public final HiveParser.alterStatementChangeColPosition_return alterStatementChangeColPosition() throws RecognitionException {
        HiveParser.alterStatementChangeColPosition_return retval = new HiveParser.alterStatementChangeColPosition_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token first=null;
        Token KW_AFTER252=null;
        HiveParser_IdentifiersParser.identifier_return afterCol =null;


        CommonTree first_tree=null;
        CommonTree KW_AFTER252_tree=null;
        RewriteRuleTokenStream stream_KW_AFTER=new RewriteRuleTokenStream(adaptor,"token KW_AFTER");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1063:5: (first= KW_FIRST | KW_AFTER afterCol= identifier -> {$first != null}? ^( TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION ) -> ^( TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION $afterCol) )
            int alt66=2;
            switch ( input.LA(1) ) {
            case KW_FIRST:
                {
                alt66=1;
                }
                break;
            case KW_AFTER:
                {
                alt66=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 66, 0, input);

                throw nvae;

            }

            switch (alt66) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1063:7: first= KW_FIRST
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    first=(Token)match(input,KW_FIRST,FOLLOW_KW_FIRST_in_alterStatementChangeColPosition4498); 
                    first_tree = 
                    (CommonTree)adaptor.create(first)
                    ;
                    adaptor.addChild(root_0, first_tree);


                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1063:22: KW_AFTER afterCol= identifier
                    {
                    KW_AFTER252=(Token)match(input,KW_AFTER,FOLLOW_KW_AFTER_in_alterStatementChangeColPosition4500);  
                    stream_KW_AFTER.add(KW_AFTER252);


                    pushFollow(FOLLOW_identifier_in_alterStatementChangeColPosition4504);
                    afterCol=identifier();

                    state._fsp--;

                    stream_identifier.add(afterCol.getTree());

                    // AST REWRITE
                    // elements: afterCol
                    // token labels: 
                    // rule labels: retval, afterCol
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    RewriteRuleSubtreeStream stream_afterCol=new RewriteRuleSubtreeStream(adaptor,"rule afterCol",afterCol!=null?afterCol.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1064:5: -> {$first != null}? ^( TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION )
                    if (first != null) {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1064:25: ^( TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION, "TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }

                    else // 1065:5: -> ^( TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION $afterCol)
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1065:8: ^( TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION $afterCol)
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION, "TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION")
                        , root_1);

                        adaptor.addChild(root_1, stream_afterCol.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementChangeColPosition"


    public static class alterStatementSuffixAddPartitions_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixAddPartitions"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1068:1: alterStatementSuffixAddPartitions[boolean table] : KW_ADD ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ -> { table }? ^( TOK_ALTERTABLE_ADDPARTS ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ ) -> ^( TOK_ALTERVIEW_ADDPARTS ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ ) ;
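    // Editor's note (illustrative, not generated): matches clauses such as
    //   ... ADD IF NOT EXISTS PARTITION (ds='2015-05-14') PARTITION (ds='2015-05-15')
    // with one or more partition elements (the partition column and values are assumed
    // examples). The boolean `table` argument selects the rewrite root below:
    // TOK_ALTERTABLE_ADDPARTS for ALTER TABLE, TOK_ALTERVIEW_ADDPARTS for ALTER VIEW.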
    public final HiveParser.alterStatementSuffixAddPartitions_return alterStatementSuffixAddPartitions(boolean table) throws RecognitionException {
        HiveParser.alterStatementSuffixAddPartitions_return retval = new HiveParser.alterStatementSuffixAddPartitions_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_ADD253=null;
        HiveParser.ifNotExists_return ifNotExists254 =null;

        HiveParser.alterStatementSuffixAddPartitionsElement_return alterStatementSuffixAddPartitionsElement255 =null;


        CommonTree KW_ADD253_tree=null;
        RewriteRuleTokenStream stream_KW_ADD=new RewriteRuleTokenStream(adaptor,"token KW_ADD");
        RewriteRuleSubtreeStream stream_alterStatementSuffixAddPartitionsElement=new RewriteRuleSubtreeStream(adaptor,"rule alterStatementSuffixAddPartitionsElement");
        RewriteRuleSubtreeStream stream_ifNotExists=new RewriteRuleSubtreeStream(adaptor,"rule ifNotExists");
         pushMsg("add partition statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1071:5: ( KW_ADD ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ -> { table }? ^( TOK_ALTERTABLE_ADDPARTS ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ ) -> ^( TOK_ALTERVIEW_ADDPARTS ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1071:7: KW_ADD ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+
            {
            KW_ADD253=(Token)match(input,KW_ADD,FOLLOW_KW_ADD_in_alterStatementSuffixAddPartitions4557);  
            stream_KW_ADD.add(KW_ADD253);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1071:14: ( ifNotExists )?
            int alt67=2;
            switch ( input.LA(1) ) {
                case KW_IF:
                    {
                    alt67=1;
                    }
                    break;
            }

            switch (alt67) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1071:14: ifNotExists
                    {
                    pushFollow(FOLLOW_ifNotExists_in_alterStatementSuffixAddPartitions4559);
                    ifNotExists254=ifNotExists();

                    state._fsp--;

                    stream_ifNotExists.add(ifNotExists254.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1071:27: ( alterStatementSuffixAddPartitionsElement )+
            int cnt68=0;
            loop68:
            do {
                int alt68=2;
                switch ( input.LA(1) ) {
                case KW_PARTITION:
                    {
                    alt68=1;
                    }
                    break;

                }

                switch (alt68) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1071:27: alterStatementSuffixAddPartitionsElement
                    {
                    pushFollow(FOLLOW_alterStatementSuffixAddPartitionsElement_in_alterStatementSuffixAddPartitions4562);
                    alterStatementSuffixAddPartitionsElement255=alterStatementSuffixAddPartitionsElement();

                    state._fsp--;

                    stream_alterStatementSuffixAddPartitionsElement.add(alterStatementSuffixAddPartitionsElement255.getTree());

                    }
                    break;

                default :
                    if ( cnt68 >= 1 ) break loop68;
                    EarlyExitException eee =
                        new EarlyExitException(68, input);
                    throw eee;
                }
                cnt68++;
            } while (true);


            // AST REWRITE
            // elements: ifNotExists, alterStatementSuffixAddPartitionsElement
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1072:5: -> { table }? ^( TOK_ALTERTABLE_ADDPARTS ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ )
            if ( table ) {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1072:19: ^( TOK_ALTERTABLE_ADDPARTS ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_ADDPARTS, "TOK_ALTERTABLE_ADDPARTS")
                , root_1);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1072:45: ( ifNotExists )?
                if ( stream_ifNotExists.hasNext() ) {
                    adaptor.addChild(root_1, stream_ifNotExists.nextTree());

                }
                stream_ifNotExists.reset();

                if ( !(stream_alterStatementSuffixAddPartitionsElement.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_alterStatementSuffixAddPartitionsElement.hasNext() ) {
                    adaptor.addChild(root_1, stream_alterStatementSuffixAddPartitionsElement.nextTree());

                }
                stream_alterStatementSuffixAddPartitionsElement.reset();

                adaptor.addChild(root_0, root_1);
                }

            }

            else // 1073:5: -> ^( TOK_ALTERVIEW_ADDPARTS ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1073:19: ^( TOK_ALTERVIEW_ADDPARTS ( ifNotExists )? ( alterStatementSuffixAddPartitionsElement )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERVIEW_ADDPARTS, "TOK_ALTERVIEW_ADDPARTS")
                , root_1);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1073:44: ( ifNotExists )?
                if ( stream_ifNotExists.hasNext() ) {
                    adaptor.addChild(root_1, stream_ifNotExists.nextTree());

                }
                stream_ifNotExists.reset();

                if ( !(stream_alterStatementSuffixAddPartitionsElement.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_alterStatementSuffixAddPartitionsElement.hasNext() ) {
                    adaptor.addChild(root_1, stream_alterStatementSuffixAddPartitionsElement.nextTree());

                }
                stream_alterStatementSuffixAddPartitionsElement.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup to perform before leaving the rule (none needed here)
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixAddPartitions"


    public static class alterStatementSuffixAddPartitionsElement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixAddPartitionsElement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1076:1: alterStatementSuffixAddPartitionsElement : partitionSpec ( partitionLocation )? ;
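    // Illustrative note (not part of the generated output; names and paths are invented examples):
    // one element of an ADD PARTITION suffix, e.g.
    //   PARTITION (dt='2015-05-14') LOCATION '/warehouse/pv/dt=2015-05-14'
    // where, per the rule above, the partitionLocation clause is optional.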
    public final HiveParser.alterStatementSuffixAddPartitionsElement_return alterStatementSuffixAddPartitionsElement() throws RecognitionException {
        HiveParser.alterStatementSuffixAddPartitionsElement_return retval = new HiveParser.alterStatementSuffixAddPartitionsElement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec256 =null;

        HiveParser.partitionLocation_return partitionLocation257 =null;



        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1077:5: ( partitionSpec ( partitionLocation )? )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1077:7: partitionSpec ( partitionLocation )?
            {
            root_0 = (CommonTree)adaptor.nil();


            pushFollow(FOLLOW_partitionSpec_in_alterStatementSuffixAddPartitionsElement4625);
            partitionSpec256=partitionSpec();

            state._fsp--;

            adaptor.addChild(root_0, partitionSpec256.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1077:21: ( partitionLocation )?
            int alt69=2;
            switch ( input.LA(1) ) {
                case KW_LOCATION:
                    {
                    alt69=1;
                    }
                    break;
            }

            switch (alt69) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1077:21: partitionLocation
                    {
                    pushFollow(FOLLOW_partitionLocation_in_alterStatementSuffixAddPartitionsElement4627);
                    partitionLocation257=partitionLocation();

                    state._fsp--;

                    adaptor.addChild(root_0, partitionLocation257.getTree());

                    }
                    break;

            }


            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup to perform before leaving the rule (none needed here)
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixAddPartitionsElement"


    public static class alterStatementSuffixTouch_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixTouch"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1080:1: alterStatementSuffixTouch : KW_TOUCH ( partitionSpec )* -> ^( TOK_ALTERTABLE_TOUCH ( partitionSpec )* ) ;
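    // Illustrative note (not part of the generated output; names and values are invented examples):
    // within an ALTER TABLE statement this rule matches a suffix such as
    //   TOUCH PARTITION (dt='2015-05-14')
    // (zero or more partitionSpecs) and rewrites it to a TOK_ALTERTABLE_TOUCH subtree.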
    public final HiveParser.alterStatementSuffixTouch_return alterStatementSuffixTouch() throws RecognitionException {
        HiveParser.alterStatementSuffixTouch_return retval = new HiveParser.alterStatementSuffixTouch_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_TOUCH258=null;
        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec259 =null;


        CommonTree KW_TOUCH258_tree=null;
        RewriteRuleTokenStream stream_KW_TOUCH=new RewriteRuleTokenStream(adaptor,"token KW_TOUCH");
        RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
         pushMsg("touch statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1083:5: ( KW_TOUCH ( partitionSpec )* -> ^( TOK_ALTERTABLE_TOUCH ( partitionSpec )* ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1083:7: KW_TOUCH ( partitionSpec )*
            {
            KW_TOUCH258=(Token)match(input,KW_TOUCH,FOLLOW_KW_TOUCH_in_alterStatementSuffixTouch4655);  
            stream_KW_TOUCH.add(KW_TOUCH258);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1083:16: ( partitionSpec )*
            loop70:
            do {
                int alt70=2;
                switch ( input.LA(1) ) {
                case KW_PARTITION:
                    {
                    alt70=1;
                    }
                    break;

                }

                switch (alt70) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1083:17: partitionSpec
            	    {
            	    pushFollow(FOLLOW_partitionSpec_in_alterStatementSuffixTouch4658);
            	    partitionSpec259=partitionSpec();

            	    state._fsp--;

            	    stream_partitionSpec.add(partitionSpec259.getTree());

            	    }
            	    break;

            	default :
            	    break loop70;
                }
            } while (true);


            // AST REWRITE
            // elements: partitionSpec
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1084:5: -> ^( TOK_ALTERTABLE_TOUCH ( partitionSpec )* )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1084:8: ^( TOK_ALTERTABLE_TOUCH ( partitionSpec )* )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_TOUCH, "TOK_ALTERTABLE_TOUCH")
                , root_1);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1084:31: ( partitionSpec )*
                while ( stream_partitionSpec.hasNext() ) {
                    adaptor.addChild(root_1, stream_partitionSpec.nextTree());

                }
                stream_partitionSpec.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup to perform before leaving the rule (none needed here)
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixTouch"


    public static class alterStatementSuffixArchive_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixArchive"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1087:1: alterStatementSuffixArchive : KW_ARCHIVE ( partitionSpec )* -> ^( TOK_ALTERTABLE_ARCHIVE ( partitionSpec )* ) ;
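    // Illustrative note (not part of the generated output; names and values are invented examples):
    // matches a suffix such as
    //   ARCHIVE PARTITION (dt='2015-05-14')
    // (zero or more partitionSpecs) and rewrites it to a TOK_ALTERTABLE_ARCHIVE subtree.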
    public final HiveParser.alterStatementSuffixArchive_return alterStatementSuffixArchive() throws RecognitionException {
        HiveParser.alterStatementSuffixArchive_return retval = new HiveParser.alterStatementSuffixArchive_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_ARCHIVE260=null;
        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec261 =null;


        CommonTree KW_ARCHIVE260_tree=null;
        RewriteRuleTokenStream stream_KW_ARCHIVE=new RewriteRuleTokenStream(adaptor,"token KW_ARCHIVE");
        RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
         pushMsg("archive statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1090:5: ( KW_ARCHIVE ( partitionSpec )* -> ^( TOK_ALTERTABLE_ARCHIVE ( partitionSpec )* ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1090:7: KW_ARCHIVE ( partitionSpec )*
            {
            KW_ARCHIVE260=(Token)match(input,KW_ARCHIVE,FOLLOW_KW_ARCHIVE_in_alterStatementSuffixArchive4702);  
            stream_KW_ARCHIVE.add(KW_ARCHIVE260);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1090:18: ( partitionSpec )*
            loop71:
            do {
                int alt71=2;
                switch ( input.LA(1) ) {
                case KW_PARTITION:
                    {
                    alt71=1;
                    }
                    break;

                }

                switch (alt71) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1090:19: partitionSpec
            	    {
            	    pushFollow(FOLLOW_partitionSpec_in_alterStatementSuffixArchive4705);
            	    partitionSpec261=partitionSpec();

            	    state._fsp--;

            	    stream_partitionSpec.add(partitionSpec261.getTree());

            	    }
            	    break;

            	default :
            	    break loop71;
                }
            } while (true);


            // AST REWRITE
            // elements: partitionSpec
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1091:5: -> ^( TOK_ALTERTABLE_ARCHIVE ( partitionSpec )* )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1091:8: ^( TOK_ALTERTABLE_ARCHIVE ( partitionSpec )* )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_ARCHIVE, "TOK_ALTERTABLE_ARCHIVE")
                , root_1);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1091:33: ( partitionSpec )*
                while ( stream_partitionSpec.hasNext() ) {
                    adaptor.addChild(root_1, stream_partitionSpec.nextTree());

                }
                stream_partitionSpec.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup to perform before leaving the rule (none needed here)
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixArchive"


    public static class alterStatementSuffixUnArchive_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixUnArchive"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1094:1: alterStatementSuffixUnArchive : KW_UNARCHIVE ( partitionSpec )* -> ^( TOK_ALTERTABLE_UNARCHIVE ( partitionSpec )* ) ;
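    // Illustrative note (not part of the generated output; names and values are invented examples):
    // matches a suffix such as
    //   UNARCHIVE PARTITION (dt='2015-05-14')
    // (zero or more partitionSpecs) and rewrites it to a TOK_ALTERTABLE_UNARCHIVE subtree.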
    public final HiveParser.alterStatementSuffixUnArchive_return alterStatementSuffixUnArchive() throws RecognitionException {
        HiveParser.alterStatementSuffixUnArchive_return retval = new HiveParser.alterStatementSuffixUnArchive_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_UNARCHIVE262=null;
        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec263 =null;


        CommonTree KW_UNARCHIVE262_tree=null;
        RewriteRuleTokenStream stream_KW_UNARCHIVE=new RewriteRuleTokenStream(adaptor,"token KW_UNARCHIVE");
        RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
         pushMsg("unarchive statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1097:5: ( KW_UNARCHIVE ( partitionSpec )* -> ^( TOK_ALTERTABLE_UNARCHIVE ( partitionSpec )* ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1097:7: KW_UNARCHIVE ( partitionSpec )*
            {
            KW_UNARCHIVE262=(Token)match(input,KW_UNARCHIVE,FOLLOW_KW_UNARCHIVE_in_alterStatementSuffixUnArchive4749);  
            stream_KW_UNARCHIVE.add(KW_UNARCHIVE262);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1097:20: ( partitionSpec )*
            loop72:
            do {
                int alt72=2;
                switch ( input.LA(1) ) {
                case KW_PARTITION:
                    {
                    alt72=1;
                    }
                    break;

                }

                switch (alt72) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1097:21: partitionSpec
            	    {
            	    pushFollow(FOLLOW_partitionSpec_in_alterStatementSuffixUnArchive4752);
            	    partitionSpec263=partitionSpec();

            	    state._fsp--;

            	    stream_partitionSpec.add(partitionSpec263.getTree());

            	    }
            	    break;

            	default :
            	    break loop72;
                }
            } while (true);


            // AST REWRITE
            // elements: partitionSpec
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1098:5: -> ^( TOK_ALTERTABLE_UNARCHIVE ( partitionSpec )* )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1098:8: ^( TOK_ALTERTABLE_UNARCHIVE ( partitionSpec )* )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_UNARCHIVE, "TOK_ALTERTABLE_UNARCHIVE")
                , root_1);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1098:35: ( partitionSpec )*
                while ( stream_partitionSpec.hasNext() ) {
                    adaptor.addChild(root_1, stream_partitionSpec.nextTree());

                }
                stream_partitionSpec.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup to perform before leaving the rule (none needed here)
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixUnArchive"


    public static class partitionLocation_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "partitionLocation"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1101:1: partitionLocation : KW_LOCATION locn= StringLiteral -> ^( TOK_PARTITIONLOCATION $locn) ;
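    // Illustrative note (not part of the generated output; the path is an invented example):
    // matches a clause such as
    //   LOCATION '/warehouse/pv/dt=2015-05-14'
    // and rewrites it to ^( TOK_PARTITIONLOCATION <the StringLiteral> ).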
    public final HiveParser.partitionLocation_return partitionLocation() throws RecognitionException {
        HiveParser.partitionLocation_return retval = new HiveParser.partitionLocation_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token locn=null;
        Token KW_LOCATION264=null;

        CommonTree locn_tree=null;
        CommonTree KW_LOCATION264_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_LOCATION=new RewriteRuleTokenStream(adaptor,"token KW_LOCATION");

         pushMsg("partition location", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1104:5: ( KW_LOCATION locn= StringLiteral -> ^( TOK_PARTITIONLOCATION $locn) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1105:7: KW_LOCATION locn= StringLiteral
            {
            KW_LOCATION264=(Token)match(input,KW_LOCATION,FOLLOW_KW_LOCATION_in_partitionLocation4802);  
            stream_KW_LOCATION.add(KW_LOCATION264);


            locn=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_partitionLocation4806);  
            stream_StringLiteral.add(locn);


            // AST REWRITE
            // elements: locn
            // token labels: locn
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_locn=new RewriteRuleTokenStream(adaptor,"token locn",locn);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1105:38: -> ^( TOK_PARTITIONLOCATION $locn)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1105:41: ^( TOK_PARTITIONLOCATION $locn)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_PARTITIONLOCATION, "TOK_PARTITIONLOCATION")
                , root_1);

                adaptor.addChild(root_1, stream_locn.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup to perform before leaving the rule (none needed here)
        }
        return retval;
    }
    // $ANTLR end "partitionLocation"


    public static class alterStatementSuffixDropPartitions_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixDropPartitions"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1108:1: alterStatementSuffixDropPartitions[boolean table] : KW_DROP ( ifExists )? dropPartitionSpec ( COMMA dropPartitionSpec )* ( ignoreProtection )? -> { table }? ^( TOK_ALTERTABLE_DROPPARTS ( dropPartitionSpec )+ ( ifExists )? ( ignoreProtection )? ) -> ^( TOK_ALTERVIEW_DROPPARTS ( dropPartitionSpec )+ ( ifExists )? ( ignoreProtection )? ) ;
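    // Illustrative note (not part of the generated output; names and values are invented examples):
    // after an ALTER TABLE <name> prefix this rule matches a suffix such as
    //   DROP IF EXISTS PARTITION (dt='2015-05-14'), PARTITION (dt='2015-05-15')
    // optionally followed by an ignoreProtection clause, and rewrites it to a
    // TOK_ALTERTABLE_DROPPARTS subtree, or to TOK_ALTERVIEW_DROPPARTS when table == false.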
    public final HiveParser.alterStatementSuffixDropPartitions_return alterStatementSuffixDropPartitions(boolean table) throws RecognitionException {
        HiveParser.alterStatementSuffixDropPartitions_return retval = new HiveParser.alterStatementSuffixDropPartitions_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_DROP265=null;
        Token COMMA268=null;
        HiveParser.ifExists_return ifExists266 =null;

        HiveParser_IdentifiersParser.dropPartitionSpec_return dropPartitionSpec267 =null;

        HiveParser_IdentifiersParser.dropPartitionSpec_return dropPartitionSpec269 =null;

        HiveParser.ignoreProtection_return ignoreProtection270 =null;


        CommonTree KW_DROP265_tree=null;
        CommonTree COMMA268_tree=null;
        RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleSubtreeStream stream_dropPartitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule dropPartitionSpec");
        RewriteRuleSubtreeStream stream_ignoreProtection=new RewriteRuleSubtreeStream(adaptor,"rule ignoreProtection");
        RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");
         pushMsg("drop partition statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1111:5: ( KW_DROP ( ifExists )? dropPartitionSpec ( COMMA dropPartitionSpec )* ( ignoreProtection )? -> { table }? ^( TOK_ALTERTABLE_DROPPARTS ( dropPartitionSpec )+ ( ifExists )? ( ignoreProtection )? ) -> ^( TOK_ALTERVIEW_DROPPARTS ( dropPartitionSpec )+ ( ifExists )? ( ignoreProtection )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1111:7: KW_DROP ( ifExists )? dropPartitionSpec ( COMMA dropPartitionSpec )* ( ignoreProtection )?
            {
            KW_DROP265=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_alterStatementSuffixDropPartitions4843);  
            stream_KW_DROP.add(KW_DROP265);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1111:15: ( ifExists )?
            int alt73=2;
            switch ( input.LA(1) ) {
                case KW_IF:
                    {
                    alt73=1;
                    }
                    break;
            }

            switch (alt73) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1111:15: ifExists
                    {
                    pushFollow(FOLLOW_ifExists_in_alterStatementSuffixDropPartitions4845);
                    ifExists266=ifExists();

                    state._fsp--;

                    stream_ifExists.add(ifExists266.getTree());

                    }
                    break;

            }


            pushFollow(FOLLOW_dropPartitionSpec_in_alterStatementSuffixDropPartitions4848);
            dropPartitionSpec267=dropPartitionSpec();

            state._fsp--;

            stream_dropPartitionSpec.add(dropPartitionSpec267.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1111:43: ( COMMA dropPartitionSpec )*
            loop74:
            do {
                int alt74=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt74=1;
                    }
                    break;

                }

                switch (alt74) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1111:44: COMMA dropPartitionSpec
            	    {
            	    COMMA268=(Token)match(input,COMMA,FOLLOW_COMMA_in_alterStatementSuffixDropPartitions4851);  
            	    stream_COMMA.add(COMMA268);


            	    pushFollow(FOLLOW_dropPartitionSpec_in_alterStatementSuffixDropPartitions4853);
            	    dropPartitionSpec269=dropPartitionSpec();

            	    state._fsp--;

            	    stream_dropPartitionSpec.add(dropPartitionSpec269.getTree());

            	    }
            	    break;

            	default :
            	    break loop74;
                }
            } while (true);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1111:70: ( ignoreProtection )?
            int alt75=2;
            switch ( input.LA(1) ) {
                case KW_IGNORE:
                    {
                    alt75=1;
                    }
                    break;
            }

            switch (alt75) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1111:70: ignoreProtection
                    {
                    pushFollow(FOLLOW_ignoreProtection_in_alterStatementSuffixDropPartitions4857);
                    ignoreProtection270=ignoreProtection();

                    state._fsp--;

                    stream_ignoreProtection.add(ignoreProtection270.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: dropPartitionSpec, ignoreProtection, ifExists
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1112:5: -> { table }? ^( TOK_ALTERTABLE_DROPPARTS ( dropPartitionSpec )+ ( ifExists )? ( ignoreProtection )? )
            if ( table ) {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1112:19: ^( TOK_ALTERTABLE_DROPPARTS ( dropPartitionSpec )+ ( ifExists )? ( ignoreProtection )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_DROPPARTS, "TOK_ALTERTABLE_DROPPARTS")
                , root_1);

                if ( !(stream_dropPartitionSpec.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_dropPartitionSpec.hasNext() ) {
                    adaptor.addChild(root_1, stream_dropPartitionSpec.nextTree());

                }
                stream_dropPartitionSpec.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1112:65: ( ifExists )?
                if ( stream_ifExists.hasNext() ) {
                    adaptor.addChild(root_1, stream_ifExists.nextTree());

                }
                stream_ifExists.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1112:75: ( ignoreProtection )?
                if ( stream_ignoreProtection.hasNext() ) {
                    adaptor.addChild(root_1, stream_ignoreProtection.nextTree());

                }
                stream_ignoreProtection.reset();

                adaptor.addChild(root_0, root_1);
                }

            }

            else // 1113:5: -> ^( TOK_ALTERVIEW_DROPPARTS ( dropPartitionSpec )+ ( ifExists )? ( ignoreProtection )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1113:19: ^( TOK_ALTERVIEW_DROPPARTS ( dropPartitionSpec )+ ( ifExists )? ( ignoreProtection )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERVIEW_DROPPARTS, "TOK_ALTERVIEW_DROPPARTS")
                , root_1);

                if ( !(stream_dropPartitionSpec.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_dropPartitionSpec.hasNext() ) {
                    adaptor.addChild(root_1, stream_dropPartitionSpec.nextTree());

                }
                stream_dropPartitionSpec.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1113:64: ( ifExists )?
                if ( stream_ifExists.hasNext() ) {
                    adaptor.addChild(root_1, stream_ifExists.nextTree());

                }
                stream_ifExists.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1113:74: ( ignoreProtection )?
                if ( stream_ignoreProtection.hasNext() ) {
                    adaptor.addChild(root_1, stream_ignoreProtection.nextTree());

                }
                stream_ignoreProtection.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup to perform before leaving the rule (none needed here)
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixDropPartitions"


    public static class alterStatementSuffixProperties_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixProperties"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1116:1: alterStatementSuffixProperties : ( KW_SET KW_TBLPROPERTIES tableProperties -> ^( TOK_ALTERTABLE_PROPERTIES tableProperties ) | KW_UNSET KW_TBLPROPERTIES ( ifExists )? tableProperties -> ^( TOK_ALTERTABLE_DROPPROPERTIES tableProperties ( ifExists )? ) );
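    // Illustrative note (not part of the generated output; keys and values are invented examples):
    // matches suffixes such as
    //   SET TBLPROPERTIES ('comment'='new comment')
    //   UNSET TBLPROPERTIES IF EXISTS ('comment')
    // producing TOK_ALTERTABLE_PROPERTIES and TOK_ALTERTABLE_DROPPROPERTIES subtrees respectively.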
    public final HiveParser.alterStatementSuffixProperties_return alterStatementSuffixProperties() throws RecognitionException {
        HiveParser.alterStatementSuffixProperties_return retval = new HiveParser.alterStatementSuffixProperties_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_SET271=null;
        Token KW_TBLPROPERTIES272=null;
        Token KW_UNSET274=null;
        Token KW_TBLPROPERTIES275=null;
        HiveParser.tableProperties_return tableProperties273 =null;

        HiveParser.ifExists_return ifExists276 =null;

        HiveParser.tableProperties_return tableProperties277 =null;


        CommonTree KW_SET271_tree=null;
        CommonTree KW_TBLPROPERTIES272_tree=null;
        CommonTree KW_UNSET274_tree=null;
        CommonTree KW_TBLPROPERTIES275_tree=null;
        RewriteRuleTokenStream stream_KW_UNSET=new RewriteRuleTokenStream(adaptor,"token KW_UNSET");
        RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
        RewriteRuleTokenStream stream_KW_TBLPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_TBLPROPERTIES");
        RewriteRuleSubtreeStream stream_tableProperties=new RewriteRuleSubtreeStream(adaptor,"rule tableProperties");
        RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");
         pushMsg("alter properties statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1119:5: ( KW_SET KW_TBLPROPERTIES tableProperties -> ^( TOK_ALTERTABLE_PROPERTIES tableProperties ) | KW_UNSET KW_TBLPROPERTIES ( ifExists )? tableProperties -> ^( TOK_ALTERTABLE_DROPPROPERTIES tableProperties ( ifExists )? ) )
            int alt77=2;
            switch ( input.LA(1) ) {
            case KW_SET:
                {
                alt77=1;
                }
                break;
            case KW_UNSET:
                {
                alt77=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 77, 0, input);

                throw nvae;

            }

            switch (alt77) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1119:7: KW_SET KW_TBLPROPERTIES tableProperties
                    {
                    KW_SET271=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterStatementSuffixProperties4936);  
                    stream_KW_SET.add(KW_SET271);


                    KW_TBLPROPERTIES272=(Token)match(input,KW_TBLPROPERTIES,FOLLOW_KW_TBLPROPERTIES_in_alterStatementSuffixProperties4938);  
                    stream_KW_TBLPROPERTIES.add(KW_TBLPROPERTIES272);


                    pushFollow(FOLLOW_tableProperties_in_alterStatementSuffixProperties4940);
                    tableProperties273=tableProperties();

                    state._fsp--;

                    stream_tableProperties.add(tableProperties273.getTree());

                    // AST REWRITE
                    // elements: tableProperties
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1120:5: -> ^( TOK_ALTERTABLE_PROPERTIES tableProperties )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1120:8: ^( TOK_ALTERTABLE_PROPERTIES tableProperties )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERTABLE_PROPERTIES, "TOK_ALTERTABLE_PROPERTIES")
                        , root_1);

                        adaptor.addChild(root_1, stream_tableProperties.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1121:7: KW_UNSET KW_TBLPROPERTIES ( ifExists )? tableProperties
                    {
                    KW_UNSET274=(Token)match(input,KW_UNSET,FOLLOW_KW_UNSET_in_alterStatementSuffixProperties4960);  
                    stream_KW_UNSET.add(KW_UNSET274);


                    KW_TBLPROPERTIES275=(Token)match(input,KW_TBLPROPERTIES,FOLLOW_KW_TBLPROPERTIES_in_alterStatementSuffixProperties4962);  
                    stream_KW_TBLPROPERTIES.add(KW_TBLPROPERTIES275);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1121:33: ( ifExists )?
                    int alt76=2;
                    switch ( input.LA(1) ) {
                        case KW_IF:
                            {
                            alt76=1;
                            }
                            break;
                    }

                    switch (alt76) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1121:33: ifExists
                            {
                            pushFollow(FOLLOW_ifExists_in_alterStatementSuffixProperties4964);
                            ifExists276=ifExists();

                            state._fsp--;

                            stream_ifExists.add(ifExists276.getTree());

                            }
                            break;

                    }


                    pushFollow(FOLLOW_tableProperties_in_alterStatementSuffixProperties4967);
                    tableProperties277=tableProperties();

                    state._fsp--;

                    stream_tableProperties.add(tableProperties277.getTree());

                    // AST REWRITE
                    // elements: tableProperties, ifExists
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1122:5: -> ^( TOK_ALTERTABLE_DROPPROPERTIES tableProperties ( ifExists )? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1122:8: ^( TOK_ALTERTABLE_DROPPROPERTIES tableProperties ( ifExists )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERTABLE_DROPPROPERTIES, "TOK_ALTERTABLE_DROPPROPERTIES")
                        , root_1);

                        adaptor.addChild(root_1, stream_tableProperties.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1122:56: ( ifExists )?
                        if ( stream_ifExists.hasNext() ) {
                            adaptor.addChild(root_1, stream_ifExists.nextTree());

                        }
                        stream_ifExists.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup to perform before leaving the rule (none needed here)
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixProperties"


    public static class alterViewSuffixProperties_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterViewSuffixProperties"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1125:1: alterViewSuffixProperties : ( KW_SET KW_TBLPROPERTIES tableProperties -> ^( TOK_ALTERVIEW_PROPERTIES tableProperties ) | KW_UNSET KW_TBLPROPERTIES ( ifExists )? tableProperties -> ^( TOK_ALTERVIEW_DROPPROPERTIES tableProperties ( ifExists )? ) );
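    // Illustrative note (not part of the generated output; keys and values are invented examples):
    // the ALTER VIEW counterpart of alterStatementSuffixProperties, matching e.g.
    //   SET TBLPROPERTIES ('comment'='new comment')
    //   UNSET TBLPROPERTIES IF EXISTS ('comment')
    // and producing TOK_ALTERVIEW_PROPERTIES / TOK_ALTERVIEW_DROPPROPERTIES subtrees.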
    public final HiveParser.alterViewSuffixProperties_return alterViewSuffixProperties() throws RecognitionException {
        HiveParser.alterViewSuffixProperties_return retval = new HiveParser.alterViewSuffixProperties_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_SET278=null;
        Token KW_TBLPROPERTIES279=null;
        Token KW_UNSET281=null;
        Token KW_TBLPROPERTIES282=null;
        HiveParser.tableProperties_return tableProperties280 =null;

        HiveParser.ifExists_return ifExists283 =null;

        HiveParser.tableProperties_return tableProperties284 =null;


        CommonTree KW_SET278_tree=null;
        CommonTree KW_TBLPROPERTIES279_tree=null;
        CommonTree KW_UNSET281_tree=null;
        CommonTree KW_TBLPROPERTIES282_tree=null;
        RewriteRuleTokenStream stream_KW_UNSET=new RewriteRuleTokenStream(adaptor,"token KW_UNSET");
        RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
        RewriteRuleTokenStream stream_KW_TBLPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_TBLPROPERTIES");
        RewriteRuleSubtreeStream stream_tableProperties=new RewriteRuleSubtreeStream(adaptor,"rule tableProperties");
        RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");
         pushMsg("alter view properties statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1128:5: ( KW_SET KW_TBLPROPERTIES tableProperties -> ^( TOK_ALTERVIEW_PROPERTIES tableProperties ) | KW_UNSET KW_TBLPROPERTIES ( ifExists )? tableProperties -> ^( TOK_ALTERVIEW_DROPPROPERTIES tableProperties ( ifExists )? ) )
            int alt79=2;
            switch ( input.LA(1) ) {
            case KW_SET:
                {
                alt79=1;
                }
                break;
            case KW_UNSET:
                {
                alt79=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 79, 0, input);

                throw nvae;

            }

            switch (alt79) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1128:7: KW_SET KW_TBLPROPERTIES tableProperties
                    {
                    KW_SET278=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterViewSuffixProperties5009);  
                    stream_KW_SET.add(KW_SET278);


                    KW_TBLPROPERTIES279=(Token)match(input,KW_TBLPROPERTIES,FOLLOW_KW_TBLPROPERTIES_in_alterViewSuffixProperties5011);  
                    stream_KW_TBLPROPERTIES.add(KW_TBLPROPERTIES279);


                    pushFollow(FOLLOW_tableProperties_in_alterViewSuffixProperties5013);
                    tableProperties280=tableProperties();

                    state._fsp--;

                    stream_tableProperties.add(tableProperties280.getTree());

                    // AST REWRITE
                    // elements: tableProperties
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1129:5: -> ^( TOK_ALTERVIEW_PROPERTIES tableProperties )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1129:8: ^( TOK_ALTERVIEW_PROPERTIES tableProperties )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERVIEW_PROPERTIES, "TOK_ALTERVIEW_PROPERTIES")
                        , root_1);

                        adaptor.addChild(root_1, stream_tableProperties.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1130:7: KW_UNSET KW_TBLPROPERTIES ( ifExists )? tableProperties
                    {
                    KW_UNSET281=(Token)match(input,KW_UNSET,FOLLOW_KW_UNSET_in_alterViewSuffixProperties5033);  
                    stream_KW_UNSET.add(KW_UNSET281);


                    KW_TBLPROPERTIES282=(Token)match(input,KW_TBLPROPERTIES,FOLLOW_KW_TBLPROPERTIES_in_alterViewSuffixProperties5035);  
                    stream_KW_TBLPROPERTIES.add(KW_TBLPROPERTIES282);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1130:33: ( ifExists )?
                    int alt78=2;
                    switch ( input.LA(1) ) {
                        case KW_IF:
                            {
                            alt78=1;
                            }
                            break;
                    }

                    switch (alt78) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1130:33: ifExists
                            {
                            pushFollow(FOLLOW_ifExists_in_alterViewSuffixProperties5037);
                            ifExists283=ifExists();

                            state._fsp--;

                            stream_ifExists.add(ifExists283.getTree());

                            }
                            break;

                    }


                    pushFollow(FOLLOW_tableProperties_in_alterViewSuffixProperties5040);
                    tableProperties284=tableProperties();

                    state._fsp--;

                    stream_tableProperties.add(tableProperties284.getTree());

                    // AST REWRITE
                    // elements: ifExists, tableProperties
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1131:5: -> ^( TOK_ALTERVIEW_DROPPROPERTIES tableProperties ( ifExists )? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1131:8: ^( TOK_ALTERVIEW_DROPPROPERTIES tableProperties ( ifExists )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERVIEW_DROPPROPERTIES, "TOK_ALTERVIEW_DROPPROPERTIES")
                        , root_1);

                        adaptor.addChild(root_1, stream_tableProperties.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1131:55: ( ifExists )?
                        if ( stream_ifExists.hasNext() ) {
                            adaptor.addChild(root_1, stream_ifExists.nextTree());

                        }
                        stream_ifExists.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup to perform before leaving the rule (none needed here)
        }
        return retval;
    }
    // $ANTLR end "alterViewSuffixProperties"


    public static class alterStatementSuffixSerdeProperties_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixSerdeProperties"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1134:1: alterStatementSuffixSerdeProperties : ( KW_SET KW_SERDE serdeName= StringLiteral ( KW_WITH KW_SERDEPROPERTIES tableProperties )? -> ^( TOK_ALTERTABLE_SERIALIZER $serdeName ( tableProperties )? ) | KW_SET KW_SERDEPROPERTIES tableProperties -> ^( TOK_ALTERTABLE_SERDEPROPERTIES tableProperties ) );
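    // Illustrative note (not part of the generated output; the SerDe class and property names are
    // example values): matches suffixes such as
    //   SET SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' WITH SERDEPROPERTIES ('field.delim'=',')
    //   SET SERDEPROPERTIES ('field.delim'=',')
    // producing TOK_ALTERTABLE_SERIALIZER and TOK_ALTERTABLE_SERDEPROPERTIES subtrees respectively.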
    public final HiveParser.alterStatementSuffixSerdeProperties_return alterStatementSuffixSerdeProperties() throws RecognitionException {
        HiveParser.alterStatementSuffixSerdeProperties_return retval = new HiveParser.alterStatementSuffixSerdeProperties_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token serdeName=null;
        Token KW_SET285=null;
        Token KW_SERDE286=null;
        Token KW_WITH287=null;
        Token KW_SERDEPROPERTIES288=null;
        Token KW_SET290=null;
        Token KW_SERDEPROPERTIES291=null;
        HiveParser.tableProperties_return tableProperties289 =null;

        HiveParser.tableProperties_return tableProperties292 =null;


        CommonTree serdeName_tree=null;
        CommonTree KW_SET285_tree=null;
        CommonTree KW_SERDE286_tree=null;
        CommonTree KW_WITH287_tree=null;
        CommonTree KW_SERDEPROPERTIES288_tree=null;
        CommonTree KW_SET290_tree=null;
        CommonTree KW_SERDEPROPERTIES291_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
        RewriteRuleTokenStream stream_KW_SERDE=new RewriteRuleTokenStream(adaptor,"token KW_SERDE");
        RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
        RewriteRuleTokenStream stream_KW_SERDEPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_SERDEPROPERTIES");
        RewriteRuleSubtreeStream stream_tableProperties=new RewriteRuleSubtreeStream(adaptor,"rule tableProperties");
         pushMsg("alter serdes statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1137:5: ( KW_SET KW_SERDE serdeName= StringLiteral ( KW_WITH KW_SERDEPROPERTIES tableProperties )? -> ^( TOK_ALTERTABLE_SERIALIZER $serdeName ( tableProperties )? ) | KW_SET KW_SERDEPROPERTIES tableProperties -> ^( TOK_ALTERTABLE_SERDEPROPERTIES tableProperties ) )
            int alt81=2;
            switch ( input.LA(1) ) {
            case KW_SET:
                {
                switch ( input.LA(2) ) {
                case KW_SERDE:
                    {
                    alt81=1;
                    }
                    break;
                case KW_SERDEPROPERTIES:
                    {
                    alt81=2;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 81, 1, input);

                    throw nvae;

                }

                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 81, 0, input);

                throw nvae;

            }

            switch (alt81) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1137:7: KW_SET KW_SERDE serdeName= StringLiteral ( KW_WITH KW_SERDEPROPERTIES tableProperties )?
                    {
                    KW_SET285=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterStatementSuffixSerdeProperties5082);  
                    stream_KW_SET.add(KW_SET285);


                    KW_SERDE286=(Token)match(input,KW_SERDE,FOLLOW_KW_SERDE_in_alterStatementSuffixSerdeProperties5084);  
                    stream_KW_SERDE.add(KW_SERDE286);


                    serdeName=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_alterStatementSuffixSerdeProperties5088);  
                    stream_StringLiteral.add(serdeName);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1137:47: ( KW_WITH KW_SERDEPROPERTIES tableProperties )?
                    int alt80=2;
                    switch ( input.LA(1) ) {
                        case KW_WITH:
                            {
                            alt80=1;
                            }
                            break;
                    }

                    switch (alt80) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1137:48: KW_WITH KW_SERDEPROPERTIES tableProperties
                            {
                            KW_WITH287=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_alterStatementSuffixSerdeProperties5091);  
                            stream_KW_WITH.add(KW_WITH287);


                            KW_SERDEPROPERTIES288=(Token)match(input,KW_SERDEPROPERTIES,FOLLOW_KW_SERDEPROPERTIES_in_alterStatementSuffixSerdeProperties5093);  
                            stream_KW_SERDEPROPERTIES.add(KW_SERDEPROPERTIES288);


                            pushFollow(FOLLOW_tableProperties_in_alterStatementSuffixSerdeProperties5095);
                            tableProperties289=tableProperties();

                            state._fsp--;

                            stream_tableProperties.add(tableProperties289.getTree());

                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: tableProperties, serdeName
                    // token labels: serdeName
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleTokenStream stream_serdeName=new RewriteRuleTokenStream(adaptor,"token serdeName",serdeName);
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1138:5: -> ^( TOK_ALTERTABLE_SERIALIZER $serdeName ( tableProperties )? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1138:8: ^( TOK_ALTERTABLE_SERIALIZER $serdeName ( tableProperties )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERTABLE_SERIALIZER, "TOK_ALTERTABLE_SERIALIZER")
                        , root_1);

                        adaptor.addChild(root_1, stream_serdeName.nextNode());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1138:47: ( tableProperties )?
                        if ( stream_tableProperties.hasNext() ) {
                            adaptor.addChild(root_1, stream_tableProperties.nextTree());

                        }
                        stream_tableProperties.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1139:7: KW_SET KW_SERDEPROPERTIES tableProperties
                    {
                    KW_SET290=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterStatementSuffixSerdeProperties5121);  
                    stream_KW_SET.add(KW_SET290);


                    KW_SERDEPROPERTIES291=(Token)match(input,KW_SERDEPROPERTIES,FOLLOW_KW_SERDEPROPERTIES_in_alterStatementSuffixSerdeProperties5123);  
                    stream_KW_SERDEPROPERTIES.add(KW_SERDEPROPERTIES291);


                    pushFollow(FOLLOW_tableProperties_in_alterStatementSuffixSerdeProperties5125);
                    tableProperties292=tableProperties();

                    state._fsp--;

                    stream_tableProperties.add(tableProperties292.getTree());

                    // AST REWRITE
                    // elements: tableProperties
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1140:5: -> ^( TOK_ALTERTABLE_SERDEPROPERTIES tableProperties )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1140:8: ^( TOK_ALTERTABLE_SERDEPROPERTIES tableProperties )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERTABLE_SERDEPROPERTIES, "TOK_ALTERTABLE_SERDEPROPERTIES")
                        , root_1);

                        adaptor.addChild(root_1, stream_tableProperties.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixSerdeProperties"


    public static class tablePartitionPrefix_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tablePartitionPrefix"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1143:1: tablePartitionPrefix : tableName ( partitionSpec )? -> ^( TOK_TABLE_PARTITION tableName ( partitionSpec )? ) ;
    public final HiveParser.tablePartitionPrefix_return tablePartitionPrefix() throws RecognitionException {
        HiveParser.tablePartitionPrefix_return retval = new HiveParser.tablePartitionPrefix_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser_FromClauseParser.tableName_return tableName293 =null;

        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec294 =null;


        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
        RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
        pushMsg("table partition prefix", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1146:3: ( tableName ( partitionSpec )? -> ^( TOK_TABLE_PARTITION tableName ( partitionSpec )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1146:5: tableName ( partitionSpec )?
            {
            pushFollow(FOLLOW_tableName_in_tablePartitionPrefix5162);
            tableName293=tableName();

            state._fsp--;

            stream_tableName.add(tableName293.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1146:15: ( partitionSpec )?
            int alt82=2;
            switch ( input.LA(1) ) {
                case KW_PARTITION:
                    {
                    alt82=1;
                    }
                    break;
            }

            switch (alt82) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1146:15: partitionSpec
                    {
                    pushFollow(FOLLOW_partitionSpec_in_tablePartitionPrefix5164);
                    partitionSpec294=partitionSpec();

                    state._fsp--;

                    stream_partitionSpec.add(partitionSpec294.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: partitionSpec, tableName
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1147:3: -> ^( TOK_TABLE_PARTITION tableName ( partitionSpec )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1147:5: ^( TOK_TABLE_PARTITION tableName ( partitionSpec )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABLE_PARTITION, "TOK_TABLE_PARTITION")
                , root_1);

                adaptor.addChild(root_1, stream_tableName.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1147:37: ( partitionSpec )?
                if ( stream_partitionSpec.hasNext() ) {
                    adaptor.addChild(root_1, stream_partitionSpec.nextTree());

                }
                stream_partitionSpec.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "tablePartitionPrefix"


    public static class alterStatementSuffixFileFormat_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixFileFormat"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1150:1: alterStatementSuffixFileFormat : KW_SET KW_FILEFORMAT fileFormat -> ^( TOK_ALTERTABLE_FILEFORMAT fileFormat ) ;
    public final HiveParser.alterStatementSuffixFileFormat_return alterStatementSuffixFileFormat() throws RecognitionException {
        HiveParser.alterStatementSuffixFileFormat_return retval = new HiveParser.alterStatementSuffixFileFormat_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_SET295=null;
        Token KW_FILEFORMAT296=null;
        HiveParser.fileFormat_return fileFormat297 =null;


        CommonTree KW_SET295_tree=null;
        CommonTree KW_FILEFORMAT296_tree=null;
        RewriteRuleTokenStream stream_KW_FILEFORMAT=new RewriteRuleTokenStream(adaptor,"token KW_FILEFORMAT");
        RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
        RewriteRuleSubtreeStream stream_fileFormat=new RewriteRuleSubtreeStream(adaptor,"rule fileFormat");
        pushMsg("alter fileformat statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1153:2: ( KW_SET KW_FILEFORMAT fileFormat -> ^( TOK_ALTERTABLE_FILEFORMAT fileFormat ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1153:4: KW_SET KW_FILEFORMAT fileFormat
            {
            KW_SET295=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterStatementSuffixFileFormat5199);  
            stream_KW_SET.add(KW_SET295);


            KW_FILEFORMAT296=(Token)match(input,KW_FILEFORMAT,FOLLOW_KW_FILEFORMAT_in_alterStatementSuffixFileFormat5201);  
            stream_KW_FILEFORMAT.add(KW_FILEFORMAT296);


            pushFollow(FOLLOW_fileFormat_in_alterStatementSuffixFileFormat5203);
            fileFormat297=fileFormat();

            state._fsp--;

            stream_fileFormat.add(fileFormat297.getTree());

            // AST REWRITE
            // elements: fileFormat
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1154:2: -> ^( TOK_ALTERTABLE_FILEFORMAT fileFormat )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1154:5: ^( TOK_ALTERTABLE_FILEFORMAT fileFormat )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_FILEFORMAT, "TOK_ALTERTABLE_FILEFORMAT")
                , root_1);

                adaptor.addChild(root_1, stream_fileFormat.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixFileFormat"


    public static class alterStatementSuffixClusterbySortby_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixClusterbySortby"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1157:1: alterStatementSuffixClusterbySortby : ( KW_NOT KW_CLUSTERED -> ^( TOK_ALTERTABLE_CLUSTER_SORT TOK_NOT_CLUSTERED ) | KW_NOT KW_SORTED -> ^( TOK_ALTERTABLE_CLUSTER_SORT TOK_NOT_SORTED ) | tableBuckets -> ^( TOK_ALTERTABLE_CLUSTER_SORT tableBuckets ) );
    public final HiveParser.alterStatementSuffixClusterbySortby_return alterStatementSuffixClusterbySortby() throws RecognitionException {
        HiveParser.alterStatementSuffixClusterbySortby_return retval = new HiveParser.alterStatementSuffixClusterbySortby_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_NOT298=null;
        Token KW_CLUSTERED299=null;
        Token KW_NOT300=null;
        Token KW_SORTED301=null;
        HiveParser.tableBuckets_return tableBuckets302 =null;


        CommonTree KW_NOT298_tree=null;
        CommonTree KW_CLUSTERED299_tree=null;
        CommonTree KW_NOT300_tree=null;
        CommonTree KW_SORTED301_tree=null;
        RewriteRuleTokenStream stream_KW_CLUSTERED=new RewriteRuleTokenStream(adaptor,"token KW_CLUSTERED");
        RewriteRuleTokenStream stream_KW_NOT=new RewriteRuleTokenStream(adaptor,"token KW_NOT");
        RewriteRuleTokenStream stream_KW_SORTED=new RewriteRuleTokenStream(adaptor,"token KW_SORTED");
        RewriteRuleSubtreeStream stream_tableBuckets=new RewriteRuleSubtreeStream(adaptor,"rule tableBuckets");
        pushMsg("alter partition cluster by sort by statement", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1160:3: ( KW_NOT KW_CLUSTERED -> ^( TOK_ALTERTABLE_CLUSTER_SORT TOK_NOT_CLUSTERED ) | KW_NOT KW_SORTED -> ^( TOK_ALTERTABLE_CLUSTER_SORT TOK_NOT_SORTED ) | tableBuckets -> ^( TOK_ALTERTABLE_CLUSTER_SORT tableBuckets ) )
            int alt83=3;
            switch ( input.LA(1) ) {
            case KW_NOT:
                {
                switch ( input.LA(2) ) {
                case KW_CLUSTERED:
                    {
                    alt83=1;
                    }
                    break;
                case KW_SORTED:
                    {
                    alt83=2;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 83, 1, input);

                    throw nvae;

                }

                }
                break;
            case KW_CLUSTERED:
                {
                alt83=3;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 83, 0, input);

                throw nvae;

            }

            switch (alt83) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1160:5: KW_NOT KW_CLUSTERED
                    {
                    KW_NOT298=(Token)match(input,KW_NOT,FOLLOW_KW_NOT_in_alterStatementSuffixClusterbySortby5234);  
                    stream_KW_NOT.add(KW_NOT298);


                    KW_CLUSTERED299=(Token)match(input,KW_CLUSTERED,FOLLOW_KW_CLUSTERED_in_alterStatementSuffixClusterbySortby5236);  
                    stream_KW_CLUSTERED.add(KW_CLUSTERED299);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1160:25: -> ^( TOK_ALTERTABLE_CLUSTER_SORT TOK_NOT_CLUSTERED )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1160:28: ^( TOK_ALTERTABLE_CLUSTER_SORT TOK_NOT_CLUSTERED )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERTABLE_CLUSTER_SORT, "TOK_ALTERTABLE_CLUSTER_SORT")
                        , root_1);

                        adaptor.addChild(root_1, 
                        (CommonTree)adaptor.create(TOK_NOT_CLUSTERED, "TOK_NOT_CLUSTERED")
                        );

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1161:5: KW_NOT KW_SORTED
                    {
                    KW_NOT300=(Token)match(input,KW_NOT,FOLLOW_KW_NOT_in_alterStatementSuffixClusterbySortby5250);  
                    stream_KW_NOT.add(KW_NOT300);


                    KW_SORTED301=(Token)match(input,KW_SORTED,FOLLOW_KW_SORTED_in_alterStatementSuffixClusterbySortby5252);  
                    stream_KW_SORTED.add(KW_SORTED301);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1161:22: -> ^( TOK_ALTERTABLE_CLUSTER_SORT TOK_NOT_SORTED )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1161:25: ^( TOK_ALTERTABLE_CLUSTER_SORT TOK_NOT_SORTED )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERTABLE_CLUSTER_SORT, "TOK_ALTERTABLE_CLUSTER_SORT")
                        , root_1);

                        adaptor.addChild(root_1, 
                        (CommonTree)adaptor.create(TOK_NOT_SORTED, "TOK_NOT_SORTED")
                        );

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1162:5: tableBuckets
                    {
                    pushFollow(FOLLOW_tableBuckets_in_alterStatementSuffixClusterbySortby5266);
                    tableBuckets302=tableBuckets();

                    state._fsp--;

                    stream_tableBuckets.add(tableBuckets302.getTree());

                    // AST REWRITE
                    // elements: tableBuckets
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1162:18: -> ^( TOK_ALTERTABLE_CLUSTER_SORT tableBuckets )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1162:21: ^( TOK_ALTERTABLE_CLUSTER_SORT tableBuckets )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERTABLE_CLUSTER_SORT, "TOK_ALTERTABLE_CLUSTER_SORT")
                        , root_1);

                        adaptor.addChild(root_1, stream_tableBuckets.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixClusterbySortby"


    public static class alterTblPartitionStatementSuffixSkewedLocation_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterTblPartitionStatementSuffixSkewedLocation"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1165:1: alterTblPartitionStatementSuffixSkewedLocation : KW_SET KW_SKEWED KW_LOCATION skewedLocations -> ^( TOK_ALTERTABLE_SKEWED_LOCATION skewedLocations ) ;
    public final HiveParser.alterTblPartitionStatementSuffixSkewedLocation_return alterTblPartitionStatementSuffixSkewedLocation() throws RecognitionException {
        HiveParser.alterTblPartitionStatementSuffixSkewedLocation_return retval = new HiveParser.alterTblPartitionStatementSuffixSkewedLocation_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_SET303=null;
        Token KW_SKEWED304=null;
        Token KW_LOCATION305=null;
        HiveParser.skewedLocations_return skewedLocations306 =null;


        CommonTree KW_SET303_tree=null;
        CommonTree KW_SKEWED304_tree=null;
        CommonTree KW_LOCATION305_tree=null;
        RewriteRuleTokenStream stream_KW_SKEWED=new RewriteRuleTokenStream(adaptor,"token KW_SKEWED");
        RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
        RewriteRuleTokenStream stream_KW_LOCATION=new RewriteRuleTokenStream(adaptor,"token KW_LOCATION");
        RewriteRuleSubtreeStream stream_skewedLocations=new RewriteRuleSubtreeStream(adaptor,"rule skewedLocations");
        pushMsg("alter partition skewed location", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1168:3: ( KW_SET KW_SKEWED KW_LOCATION skewedLocations -> ^( TOK_ALTERTABLE_SKEWED_LOCATION skewedLocations ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1168:5: KW_SET KW_SKEWED KW_LOCATION skewedLocations
            {
            KW_SET303=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterTblPartitionStatementSuffixSkewedLocation5297);  
            stream_KW_SET.add(KW_SET303);


            KW_SKEWED304=(Token)match(input,KW_SKEWED,FOLLOW_KW_SKEWED_in_alterTblPartitionStatementSuffixSkewedLocation5299);  
            stream_KW_SKEWED.add(KW_SKEWED304);


            KW_LOCATION305=(Token)match(input,KW_LOCATION,FOLLOW_KW_LOCATION_in_alterTblPartitionStatementSuffixSkewedLocation5301);  
            stream_KW_LOCATION.add(KW_LOCATION305);


            pushFollow(FOLLOW_skewedLocations_in_alterTblPartitionStatementSuffixSkewedLocation5303);
            skewedLocations306=skewedLocations();

            state._fsp--;

            stream_skewedLocations.add(skewedLocations306.getTree());

            // AST REWRITE
            // elements: skewedLocations
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1169:3: -> ^( TOK_ALTERTABLE_SKEWED_LOCATION skewedLocations )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1169:6: ^( TOK_ALTERTABLE_SKEWED_LOCATION skewedLocations )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_SKEWED_LOCATION, "TOK_ALTERTABLE_SKEWED_LOCATION")
                , root_1);

                adaptor.addChild(root_1, stream_skewedLocations.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterTblPartitionStatementSuffixSkewedLocation"


    public static class skewedLocations_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "skewedLocations"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1172:1: skewedLocations : LPAREN skewedLocationsList RPAREN -> ^( TOK_SKEWED_LOCATIONS skewedLocationsList ) ;
    public final HiveParser.skewedLocations_return skewedLocations() throws RecognitionException {
        HiveParser.skewedLocations_return retval = new HiveParser.skewedLocations_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token LPAREN307=null;
        Token RPAREN309=null;
        HiveParser.skewedLocationsList_return skewedLocationsList308 =null;


        CommonTree LPAREN307_tree=null;
        CommonTree RPAREN309_tree=null;
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleSubtreeStream stream_skewedLocationsList=new RewriteRuleSubtreeStream(adaptor,"rule skewedLocationsList");
         pushMsg("skewed locations", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1175:5: ( LPAREN skewedLocationsList RPAREN -> ^( TOK_SKEWED_LOCATIONS skewedLocationsList ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1176:7: LPAREN skewedLocationsList RPAREN
            {
            LPAREN307=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_skewedLocations5346);  
            stream_LPAREN.add(LPAREN307);


            pushFollow(FOLLOW_skewedLocationsList_in_skewedLocations5348);
            skewedLocationsList308=skewedLocationsList();

            state._fsp--;

            stream_skewedLocationsList.add(skewedLocationsList308.getTree());

            RPAREN309=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_skewedLocations5350);  
            stream_RPAREN.add(RPAREN309);


            // AST REWRITE
            // elements: skewedLocationsList
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1176:41: -> ^( TOK_SKEWED_LOCATIONS skewedLocationsList )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1176:44: ^( TOK_SKEWED_LOCATIONS skewedLocationsList )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SKEWED_LOCATIONS, "TOK_SKEWED_LOCATIONS")
                , root_1);

                adaptor.addChild(root_1, stream_skewedLocationsList.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "skewedLocations"


    public static class skewedLocationsList_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "skewedLocationsList"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1179:1: skewedLocationsList : skewedLocationMap ( COMMA skewedLocationMap )* -> ^( TOK_SKEWED_LOCATION_LIST ( skewedLocationMap )+ ) ;
    public final HiveParser.skewedLocationsList_return skewedLocationsList() throws RecognitionException {
        HiveParser.skewedLocationsList_return retval = new HiveParser.skewedLocationsList_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token COMMA311=null;
        HiveParser.skewedLocationMap_return skewedLocationMap310 =null;

        HiveParser.skewedLocationMap_return skewedLocationMap312 =null;


        CommonTree COMMA311_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleSubtreeStream stream_skewedLocationMap=new RewriteRuleSubtreeStream(adaptor,"rule skewedLocationMap");
         pushMsg("skewed locations list", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1182:5: ( skewedLocationMap ( COMMA skewedLocationMap )* -> ^( TOK_SKEWED_LOCATION_LIST ( skewedLocationMap )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1183:7: skewedLocationMap ( COMMA skewedLocationMap )*
            {
            pushFollow(FOLLOW_skewedLocationMap_in_skewedLocationsList5391);
            skewedLocationMap310=skewedLocationMap();

            state._fsp--;

            stream_skewedLocationMap.add(skewedLocationMap310.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1183:25: ( COMMA skewedLocationMap )*
            loop84:
            do {
                int alt84=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt84=1;
                    }
                    break;

                }

                switch (alt84) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1183:26: COMMA skewedLocationMap
                    {
                    COMMA311=(Token)match(input,COMMA,FOLLOW_COMMA_in_skewedLocationsList5394);  
                    stream_COMMA.add(COMMA311);


                    pushFollow(FOLLOW_skewedLocationMap_in_skewedLocationsList5396);
                    skewedLocationMap312=skewedLocationMap();

                    state._fsp--;

                    stream_skewedLocationMap.add(skewedLocationMap312.getTree());

                    }
                    break;

                default :
                    break loop84;
                }
            } while (true);


            // AST REWRITE
            // elements: skewedLocationMap
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1183:52: -> ^( TOK_SKEWED_LOCATION_LIST ( skewedLocationMap )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1183:55: ^( TOK_SKEWED_LOCATION_LIST ( skewedLocationMap )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SKEWED_LOCATION_LIST, "TOK_SKEWED_LOCATION_LIST")
                , root_1);

                if ( !(stream_skewedLocationMap.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_skewedLocationMap.hasNext() ) {
                    adaptor.addChild(root_1, stream_skewedLocationMap.nextTree());

                }
                stream_skewedLocationMap.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "skewedLocationsList"


    public static class skewedLocationMap_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "skewedLocationMap"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1186:1: skewedLocationMap : key= skewedValueLocationElement EQUAL value= StringLiteral -> ^( TOK_SKEWED_LOCATION_MAP $key $value) ;
    public final HiveParser.skewedLocationMap_return skewedLocationMap() throws RecognitionException {
        HiveParser.skewedLocationMap_return retval = new HiveParser.skewedLocationMap_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token value=null;
        Token EQUAL313=null;
        HiveParser.skewedValueLocationElement_return key =null;


        CommonTree value_tree=null;
        CommonTree EQUAL313_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_EQUAL=new RewriteRuleTokenStream(adaptor,"token EQUAL");
        RewriteRuleSubtreeStream stream_skewedValueLocationElement=new RewriteRuleSubtreeStream(adaptor,"rule skewedValueLocationElement");
         pushMsg("specifying skewed location map", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1189:5: (key= skewedValueLocationElement EQUAL value= StringLiteral -> ^( TOK_SKEWED_LOCATION_MAP $key $value) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1190:7: key= skewedValueLocationElement EQUAL value= StringLiteral
            {
            pushFollow(FOLLOW_skewedValueLocationElement_in_skewedLocationMap5442);
            key=skewedValueLocationElement();

            state._fsp--;

            stream_skewedValueLocationElement.add(key.getTree());

            EQUAL313=(Token)match(input,EQUAL,FOLLOW_EQUAL_in_skewedLocationMap5444);  
            stream_EQUAL.add(EQUAL313);


            value=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_skewedLocationMap5448);  
            stream_StringLiteral.add(value);


            // AST REWRITE
            // elements: value, key
            // token labels: value
            // rule labels: retval, key
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_value=new RewriteRuleTokenStream(adaptor,"token value",value);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_key=new RewriteRuleSubtreeStream(adaptor,"rule key",key!=null?key.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1190:64: -> ^( TOK_SKEWED_LOCATION_MAP $key $value)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1190:67: ^( TOK_SKEWED_LOCATION_MAP $key $value)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SKEWED_LOCATION_MAP, "TOK_SKEWED_LOCATION_MAP")
                , root_1);

                adaptor.addChild(root_1, stream_key.nextTree());

                adaptor.addChild(root_1, stream_value.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "skewedLocationMap"


    public static class alterStatementSuffixLocation_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixLocation"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1193:1: alterStatementSuffixLocation : KW_SET KW_LOCATION newLoc= StringLiteral -> ^( TOK_ALTERTABLE_LOCATION $newLoc) ;
    public final HiveParser.alterStatementSuffixLocation_return alterStatementSuffixLocation() throws RecognitionException {
        HiveParser.alterStatementSuffixLocation_return retval = new HiveParser.alterStatementSuffixLocation_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token newLoc=null;
        Token KW_SET314=null;
        Token KW_LOCATION315=null;

        CommonTree newLoc_tree=null;
        CommonTree KW_SET314_tree=null;
        CommonTree KW_LOCATION315_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
        RewriteRuleTokenStream stream_KW_LOCATION=new RewriteRuleTokenStream(adaptor,"token KW_LOCATION");

        pushMsg("alter location", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1196:3: ( KW_SET KW_LOCATION newLoc= StringLiteral -> ^( TOK_ALTERTABLE_LOCATION $newLoc) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1196:5: KW_SET KW_LOCATION newLoc= StringLiteral
            {
            KW_SET314=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterStatementSuffixLocation5485);  
            stream_KW_SET.add(KW_SET314);


            KW_LOCATION315=(Token)match(input,KW_LOCATION,FOLLOW_KW_LOCATION_in_alterStatementSuffixLocation5487);  
            stream_KW_LOCATION.add(KW_LOCATION315);


            newLoc=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_alterStatementSuffixLocation5491);  
            stream_StringLiteral.add(newLoc);


            // AST REWRITE
            // elements: newLoc
            // token labels: newLoc
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_newLoc=new RewriteRuleTokenStream(adaptor,"token newLoc",newLoc);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1197:3: -> ^( TOK_ALTERTABLE_LOCATION $newLoc)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1197:6: ^( TOK_ALTERTABLE_LOCATION $newLoc)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_LOCATION, "TOK_ALTERTABLE_LOCATION")
                , root_1);

                adaptor.addChild(root_1, stream_newLoc.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixLocation"


    public static class alterStatementSuffixSkewedby_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixSkewedby"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1201:1: alterStatementSuffixSkewedby : ( tableSkewed -> ^( TOK_ALTERTABLE_SKEWED tableSkewed ) | KW_NOT KW_SKEWED -> ^( TOK_ALTERTABLE_SKEWED ) | KW_NOT storedAsDirs -> ^( TOK_ALTERTABLE_SKEWED storedAsDirs ) );
    public final HiveParser.alterStatementSuffixSkewedby_return alterStatementSuffixSkewedby() throws RecognitionException {
        HiveParser.alterStatementSuffixSkewedby_return retval = new HiveParser.alterStatementSuffixSkewedby_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_NOT317=null;
        Token KW_SKEWED318=null;
        Token KW_NOT319=null;
        HiveParser.tableSkewed_return tableSkewed316 =null;

        HiveParser.storedAsDirs_return storedAsDirs320 =null;


        CommonTree KW_NOT317_tree=null;
        CommonTree KW_SKEWED318_tree=null;
        CommonTree KW_NOT319_tree=null;
        RewriteRuleTokenStream stream_KW_SKEWED=new RewriteRuleTokenStream(adaptor,"token KW_SKEWED");
        RewriteRuleTokenStream stream_KW_NOT=new RewriteRuleTokenStream(adaptor,"token KW_NOT");
        RewriteRuleSubtreeStream stream_storedAsDirs=new RewriteRuleSubtreeStream(adaptor,"rule storedAsDirs");
        RewriteRuleSubtreeStream stream_tableSkewed=new RewriteRuleSubtreeStream(adaptor,"rule tableSkewed");
        pushMsg("alter skewed by statement", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1204:2: ( tableSkewed -> ^( TOK_ALTERTABLE_SKEWED tableSkewed ) | KW_NOT KW_SKEWED -> ^( TOK_ALTERTABLE_SKEWED ) | KW_NOT storedAsDirs -> ^( TOK_ALTERTABLE_SKEWED storedAsDirs ) )
            int alt85=3;
            switch ( input.LA(1) ) {
            case KW_SKEWED:
                {
                alt85=1;
                }
                break;
            case KW_NOT:
                {
                switch ( input.LA(2) ) {
                case KW_SKEWED:
                    {
                    alt85=2;
                    }
                    break;
                case KW_STORED:
                    {
                    alt85=3;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 85, 2, input);

                    throw nvae;

                }

                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 85, 0, input);

                throw nvae;

            }

            switch (alt85) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1204:4: tableSkewed
                    {
                    pushFollow(FOLLOW_tableSkewed_in_alterStatementSuffixSkewedby5525);
                    tableSkewed316=tableSkewed();

                    state._fsp--;

                    stream_tableSkewed.add(tableSkewed316.getTree());

                    // AST REWRITE
                    // elements: tableSkewed
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1205:2: -> ^( TOK_ALTERTABLE_SKEWED tableSkewed )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1205:4: ^( TOK_ALTERTABLE_SKEWED tableSkewed )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERTABLE_SKEWED, "TOK_ALTERTABLE_SKEWED")
                        , root_1);

                        adaptor.addChild(root_1, stream_tableSkewed.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1207:3: KW_NOT KW_SKEWED
                    {
                    KW_NOT317=(Token)match(input,KW_NOT,FOLLOW_KW_NOT_in_alterStatementSuffixSkewedby5540);  
                    stream_KW_NOT.add(KW_NOT317);


                    KW_SKEWED318=(Token)match(input,KW_SKEWED,FOLLOW_KW_SKEWED_in_alterStatementSuffixSkewedby5542);  
                    stream_KW_SKEWED.add(KW_SKEWED318);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1208:2: -> ^( TOK_ALTERTABLE_SKEWED )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1208:4: ^( TOK_ALTERTABLE_SKEWED )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERTABLE_SKEWED, "TOK_ALTERTABLE_SKEWED")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1210:3: KW_NOT storedAsDirs
                    {
                    KW_NOT319=(Token)match(input,KW_NOT,FOLLOW_KW_NOT_in_alterStatementSuffixSkewedby5555);  
                    stream_KW_NOT.add(KW_NOT319);


                    pushFollow(FOLLOW_storedAsDirs_in_alterStatementSuffixSkewedby5557);
                    storedAsDirs320=storedAsDirs();

                    state._fsp--;

                    stream_storedAsDirs.add(storedAsDirs320.getTree());

                    // AST REWRITE
                    // elements: storedAsDirs
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1211:2: -> ^( TOK_ALTERTABLE_SKEWED storedAsDirs )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1211:4: ^( TOK_ALTERTABLE_SKEWED storedAsDirs )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ALTERTABLE_SKEWED, "TOK_ALTERTABLE_SKEWED")
                        , root_1);

                        adaptor.addChild(root_1, stream_storedAsDirs.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixSkewedby"


    public static class alterStatementSuffixExchangePartition_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixExchangePartition"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1214:1: alterStatementSuffixExchangePartition : KW_EXCHANGE partitionSpec KW_WITH KW_TABLE exchangename= tableName -> ^( TOK_ALTERTABLE_EXCHANGEPARTITION partitionSpec $exchangename) ;
    public final HiveParser.alterStatementSuffixExchangePartition_return alterStatementSuffixExchangePartition() throws RecognitionException {
        HiveParser.alterStatementSuffixExchangePartition_return retval = new HiveParser.alterStatementSuffixExchangePartition_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_EXCHANGE321=null;
        Token KW_WITH323=null;
        Token KW_TABLE324=null;
        HiveParser_FromClauseParser.tableName_return exchangename =null;

        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec322 =null;


        CommonTree KW_EXCHANGE321_tree=null;
        CommonTree KW_WITH323_tree=null;
        CommonTree KW_TABLE324_tree=null;
        RewriteRuleTokenStream stream_KW_EXCHANGE=new RewriteRuleTokenStream(adaptor,"token KW_EXCHANGE");
        RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
        RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
        pushMsg("alter exchange partition", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1217:5: ( KW_EXCHANGE partitionSpec KW_WITH KW_TABLE exchangename= tableName -> ^( TOK_ALTERTABLE_EXCHANGEPARTITION partitionSpec $exchangename) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1217:7: KW_EXCHANGE partitionSpec KW_WITH KW_TABLE exchangename= tableName
            {
            KW_EXCHANGE321=(Token)match(input,KW_EXCHANGE,FOLLOW_KW_EXCHANGE_in_alterStatementSuffixExchangePartition5588);  
            stream_KW_EXCHANGE.add(KW_EXCHANGE321);


            pushFollow(FOLLOW_partitionSpec_in_alterStatementSuffixExchangePartition5590);
            partitionSpec322=partitionSpec();

            state._fsp--;

            stream_partitionSpec.add(partitionSpec322.getTree());

            KW_WITH323=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_alterStatementSuffixExchangePartition5592);  
            stream_KW_WITH.add(KW_WITH323);


            KW_TABLE324=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_alterStatementSuffixExchangePartition5594);  
            stream_KW_TABLE.add(KW_TABLE324);


            pushFollow(FOLLOW_tableName_in_alterStatementSuffixExchangePartition5598);
            exchangename=tableName();

            state._fsp--;

            stream_tableName.add(exchangename.getTree());

            // AST REWRITE
            // elements: partitionSpec, exchangename
            // token labels: 
            // rule labels: exchangename, retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_exchangename=new RewriteRuleSubtreeStream(adaptor,"rule exchangename",exchangename!=null?exchangename.tree:null);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1218:5: -> ^( TOK_ALTERTABLE_EXCHANGEPARTITION partitionSpec $exchangename)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1218:8: ^( TOK_ALTERTABLE_EXCHANGEPARTITION partitionSpec $exchangename)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_EXCHANGEPARTITION, "TOK_ALTERTABLE_EXCHANGEPARTITION")
                , root_1);

                adaptor.addChild(root_1, stream_partitionSpec.nextTree());

                adaptor.addChild(root_1, stream_exchangename.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixExchangePartition"


    public static class alterStatementSuffixProtectMode_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixProtectMode"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1221:1: alterStatementSuffixProtectMode : alterProtectMode -> ^( TOK_ALTERTABLE_PROTECTMODE alterProtectMode ) ;
    public final HiveParser.alterStatementSuffixProtectMode_return alterStatementSuffixProtectMode() throws RecognitionException {
        HiveParser.alterStatementSuffixProtectMode_return retval = new HiveParser.alterStatementSuffixProtectMode_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.alterProtectMode_return alterProtectMode325 =null;


        RewriteRuleSubtreeStream stream_alterProtectMode=new RewriteRuleSubtreeStream(adaptor,"rule alterProtectMode");
         pushMsg("alter partition protect mode statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1224:5: ( alterProtectMode -> ^( TOK_ALTERTABLE_PROTECTMODE alterProtectMode ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1224:7: alterProtectMode
            {
            pushFollow(FOLLOW_alterProtectMode_in_alterStatementSuffixProtectMode5640);
            alterProtectMode325=alterProtectMode();

            state._fsp--;

            stream_alterProtectMode.add(alterProtectMode325.getTree());

            // AST REWRITE
            // elements: alterProtectMode
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1225:5: -> ^( TOK_ALTERTABLE_PROTECTMODE alterProtectMode )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1225:8: ^( TOK_ALTERTABLE_PROTECTMODE alterProtectMode )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_PROTECTMODE, "TOK_ALTERTABLE_PROTECTMODE")
                , root_1);

                adaptor.addChild(root_1, stream_alterProtectMode.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixProtectMode"


    public static class alterStatementSuffixRenamePart_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixRenamePart"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1228:1: alterStatementSuffixRenamePart : KW_RENAME KW_TO partitionSpec -> ^( TOK_ALTERTABLE_RENAMEPART partitionSpec ) ;
    public final HiveParser.alterStatementSuffixRenamePart_return alterStatementSuffixRenamePart() throws RecognitionException {
        HiveParser.alterStatementSuffixRenamePart_return retval = new HiveParser.alterStatementSuffixRenamePart_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_RENAME326=null;
        Token KW_TO327=null;
        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec328 =null;


        CommonTree KW_RENAME326_tree=null;
        CommonTree KW_TO327_tree=null;
        RewriteRuleTokenStream stream_KW_RENAME=new RewriteRuleTokenStream(adaptor,"token KW_RENAME");
        RewriteRuleTokenStream stream_KW_TO=new RewriteRuleTokenStream(adaptor,"token KW_TO");
        RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
         pushMsg("alter table rename partition statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1231:5: ( KW_RENAME KW_TO partitionSpec -> ^( TOK_ALTERTABLE_RENAMEPART partitionSpec ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1231:7: KW_RENAME KW_TO partitionSpec
            {
            KW_RENAME326=(Token)match(input,KW_RENAME,FOLLOW_KW_RENAME_in_alterStatementSuffixRenamePart5679);  
            stream_KW_RENAME.add(KW_RENAME326);


            KW_TO327=(Token)match(input,KW_TO,FOLLOW_KW_TO_in_alterStatementSuffixRenamePart5681);  
            stream_KW_TO.add(KW_TO327);


            pushFollow(FOLLOW_partitionSpec_in_alterStatementSuffixRenamePart5683);
            partitionSpec328=partitionSpec();

            state._fsp--;

            stream_partitionSpec.add(partitionSpec328.getTree());

            // AST REWRITE
            // elements: partitionSpec
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1232:5: -> ^( TOK_ALTERTABLE_RENAMEPART partitionSpec )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1232:7: ^( TOK_ALTERTABLE_RENAMEPART partitionSpec )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_RENAMEPART, "TOK_ALTERTABLE_RENAMEPART")
                , root_1);

                adaptor.addChild(root_1, stream_partitionSpec.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixRenamePart"


    public static class alterStatementSuffixStatsPart_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixStatsPart"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1235:1: alterStatementSuffixStatsPart : KW_UPDATE KW_STATISTICS KW_FOR ( KW_COLUMN )? colName= identifier KW_SET tableProperties ( KW_COMMENT comment= StringLiteral )? -> ^( TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties ( $comment)? ) ;
    public final HiveParser.alterStatementSuffixStatsPart_return alterStatementSuffixStatsPart() throws RecognitionException {
        HiveParser.alterStatementSuffixStatsPart_return retval = new HiveParser.alterStatementSuffixStatsPart_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token comment=null;
        Token KW_UPDATE329=null;
        Token KW_STATISTICS330=null;
        Token KW_FOR331=null;
        Token KW_COLUMN332=null;
        Token KW_SET333=null;
        Token KW_COMMENT335=null;
        HiveParser_IdentifiersParser.identifier_return colName =null;

        HiveParser.tableProperties_return tableProperties334 =null;


        CommonTree comment_tree=null;
        CommonTree KW_UPDATE329_tree=null;
        CommonTree KW_STATISTICS330_tree=null;
        CommonTree KW_FOR331_tree=null;
        CommonTree KW_COLUMN332_tree=null;
        CommonTree KW_SET333_tree=null;
        CommonTree KW_COMMENT335_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");
        RewriteRuleTokenStream stream_KW_UPDATE=new RewriteRuleTokenStream(adaptor,"token KW_UPDATE");
        RewriteRuleTokenStream stream_KW_COLUMN=new RewriteRuleTokenStream(adaptor,"token KW_COLUMN");
        RewriteRuleTokenStream stream_KW_STATISTICS=new RewriteRuleTokenStream(adaptor,"token KW_STATISTICS");
        RewriteRuleTokenStream stream_KW_FOR=new RewriteRuleTokenStream(adaptor,"token KW_FOR");
        RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
        RewriteRuleSubtreeStream stream_tableProperties=new RewriteRuleSubtreeStream(adaptor,"rule tableProperties");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("alter table stats partition statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1238:5: ( KW_UPDATE KW_STATISTICS KW_FOR ( KW_COLUMN )? colName= identifier KW_SET tableProperties ( KW_COMMENT comment= StringLiteral )? -> ^( TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties ( $comment)? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1238:7: KW_UPDATE KW_STATISTICS KW_FOR ( KW_COLUMN )? colName= identifier KW_SET tableProperties ( KW_COMMENT comment= StringLiteral )?
            {
            KW_UPDATE329=(Token)match(input,KW_UPDATE,FOLLOW_KW_UPDATE_in_alterStatementSuffixStatsPart5721);  
            stream_KW_UPDATE.add(KW_UPDATE329);


            KW_STATISTICS330=(Token)match(input,KW_STATISTICS,FOLLOW_KW_STATISTICS_in_alterStatementSuffixStatsPart5723);  
            stream_KW_STATISTICS.add(KW_STATISTICS330);


            KW_FOR331=(Token)match(input,KW_FOR,FOLLOW_KW_FOR_in_alterStatementSuffixStatsPart5725);  
            stream_KW_FOR.add(KW_FOR331);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1238:38: ( KW_COLUMN )?
            int alt86=2;
            switch ( input.LA(1) ) {
                case KW_COLUMN:
                    {
                    alt86=1;
                    }
                    break;
            }

            switch (alt86) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1238:38: KW_COLUMN
                    {
                    KW_COLUMN332=(Token)match(input,KW_COLUMN,FOLLOW_KW_COLUMN_in_alterStatementSuffixStatsPart5727);  
                    stream_KW_COLUMN.add(KW_COLUMN332);


                    }
                    break;

            }


            pushFollow(FOLLOW_identifier_in_alterStatementSuffixStatsPart5732);
            colName=identifier();

            state._fsp--;

            stream_identifier.add(colName.getTree());

            KW_SET333=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_alterStatementSuffixStatsPart5734);  
            stream_KW_SET.add(KW_SET333);


            pushFollow(FOLLOW_tableProperties_in_alterStatementSuffixStatsPart5736);
            tableProperties334=tableProperties();

            state._fsp--;

            stream_tableProperties.add(tableProperties334.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1238:91: ( KW_COMMENT comment= StringLiteral )?
            int alt87=2;
            switch ( input.LA(1) ) {
                case KW_COMMENT:
                    {
                    alt87=1;
                    }
                    break;
            }

            switch (alt87) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1238:92: KW_COMMENT comment= StringLiteral
                    {
                    KW_COMMENT335=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_alterStatementSuffixStatsPart5739);  
                    stream_KW_COMMENT.add(KW_COMMENT335);


                    comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_alterStatementSuffixStatsPart5743);  
                    stream_StringLiteral.add(comment);


                    }
                    break;

            }


            // AST REWRITE
            // elements: colName, tableProperties, comment
            // token labels: comment
            // rule labels: retval, colName
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_colName=new RewriteRuleSubtreeStream(adaptor,"rule colName",colName!=null?colName.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1239:5: -> ^( TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties ( $comment)? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1239:7: ^( TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties ( $comment)? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_UPDATECOLSTATS, "TOK_ALTERTABLE_UPDATECOLSTATS")
                , root_1);

                adaptor.addChild(root_1, stream_colName.nextTree());

                adaptor.addChild(root_1, stream_tableProperties.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1239:65: ( $comment)?
                if ( stream_comment.hasNext() ) {
                    adaptor.addChild(root_1, stream_comment.nextNode());

                }
                stream_comment.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixStatsPart"


    public static class alterStatementSuffixMergeFiles_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixMergeFiles"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1242:1: alterStatementSuffixMergeFiles : KW_CONCATENATE -> ^( TOK_ALTERTABLE_MERGEFILES ) ;
    public final HiveParser.alterStatementSuffixMergeFiles_return alterStatementSuffixMergeFiles() throws RecognitionException {
        HiveParser.alterStatementSuffixMergeFiles_return retval = new HiveParser.alterStatementSuffixMergeFiles_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_CONCATENATE336=null;

        CommonTree KW_CONCATENATE336_tree=null;
        RewriteRuleTokenStream stream_KW_CONCATENATE=new RewriteRuleTokenStream(adaptor,"token KW_CONCATENATE");

         pushMsg("", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1245:5: ( KW_CONCATENATE -> ^( TOK_ALTERTABLE_MERGEFILES ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1245:7: KW_CONCATENATE
            {
            KW_CONCATENATE336=(Token)match(input,KW_CONCATENATE,FOLLOW_KW_CONCATENATE_in_alterStatementSuffixMergeFiles5790);  
            stream_KW_CONCATENATE.add(KW_CONCATENATE336);


            // AST REWRITE
            // elements: 
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1246:5: -> ^( TOK_ALTERTABLE_MERGEFILES )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1246:8: ^( TOK_ALTERTABLE_MERGEFILES )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_MERGEFILES, "TOK_ALTERTABLE_MERGEFILES")
                , root_1);

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixMergeFiles"


    public static class alterProtectMode_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterProtectMode"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1249:1: alterProtectMode : ( KW_ENABLE alterProtectModeMode -> ^( TOK_ENABLE alterProtectModeMode ) | KW_DISABLE alterProtectModeMode -> ^( TOK_DISABLE alterProtectModeMode ) );
    public final HiveParser.alterProtectMode_return alterProtectMode() throws RecognitionException {
        HiveParser.alterProtectMode_return retval = new HiveParser.alterProtectMode_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_ENABLE337=null;
        Token KW_DISABLE339=null;
        HiveParser.alterProtectModeMode_return alterProtectModeMode338 =null;

        HiveParser.alterProtectModeMode_return alterProtectModeMode340 =null;


        CommonTree KW_ENABLE337_tree=null;
        CommonTree KW_DISABLE339_tree=null;
        RewriteRuleTokenStream stream_KW_DISABLE=new RewriteRuleTokenStream(adaptor,"token KW_DISABLE");
        RewriteRuleTokenStream stream_KW_ENABLE=new RewriteRuleTokenStream(adaptor,"token KW_ENABLE");
        RewriteRuleSubtreeStream stream_alterProtectModeMode=new RewriteRuleSubtreeStream(adaptor,"rule alterProtectModeMode");
         pushMsg("protect mode specification enable", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1252:5: ( KW_ENABLE alterProtectModeMode -> ^( TOK_ENABLE alterProtectModeMode ) | KW_DISABLE alterProtectModeMode -> ^( TOK_DISABLE alterProtectModeMode ) )
            int alt88=2;
            switch ( input.LA(1) ) {
            case KW_ENABLE:
                {
                alt88=1;
                }
                break;
            case KW_DISABLE:
                {
                alt88=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 88, 0, input);

                throw nvae;

            }

            switch (alt88) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1252:7: KW_ENABLE alterProtectModeMode
                    {
                    KW_ENABLE337=(Token)match(input,KW_ENABLE,FOLLOW_KW_ENABLE_in_alterProtectMode5827);  
                    stream_KW_ENABLE.add(KW_ENABLE337);


                    pushFollow(FOLLOW_alterProtectModeMode_in_alterProtectMode5829);
                    alterProtectModeMode338=alterProtectModeMode();

                    state._fsp--;

                    stream_alterProtectModeMode.add(alterProtectModeMode338.getTree());

                    // AST REWRITE
                    // elements: alterProtectModeMode
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1252:39: -> ^( TOK_ENABLE alterProtectModeMode )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1252:42: ^( TOK_ENABLE alterProtectModeMode )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ENABLE, "TOK_ENABLE")
                        , root_1);

                        adaptor.addChild(root_1, stream_alterProtectModeMode.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1253:7: KW_DISABLE alterProtectModeMode
                    {
                    KW_DISABLE339=(Token)match(input,KW_DISABLE,FOLLOW_KW_DISABLE_in_alterProtectMode5846);  
                    stream_KW_DISABLE.add(KW_DISABLE339);


                    pushFollow(FOLLOW_alterProtectModeMode_in_alterProtectMode5848);
                    alterProtectModeMode340=alterProtectModeMode();

                    state._fsp--;

                    stream_alterProtectModeMode.add(alterProtectModeMode340.getTree());

                    // AST REWRITE
                    // elements: alterProtectModeMode
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1253:40: -> ^( TOK_DISABLE alterProtectModeMode )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1253:43: ^( TOK_DISABLE alterProtectModeMode )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_DISABLE, "TOK_DISABLE")
                        , root_1);

                        adaptor.addChild(root_1, stream_alterProtectModeMode.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterProtectMode"


    public static class alterProtectModeMode_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterProtectModeMode"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1256:1: alterProtectModeMode : ( KW_OFFLINE -> ^( TOK_OFFLINE ) | KW_NO_DROP ( KW_CASCADE )? -> ^( TOK_NO_DROP ( KW_CASCADE )? ) | KW_READONLY -> ^( TOK_READONLY ) );
    public final HiveParser.alterProtectModeMode_return alterProtectModeMode() throws RecognitionException {
        HiveParser.alterProtectModeMode_return retval = new HiveParser.alterProtectModeMode_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_OFFLINE341=null;
        Token KW_NO_DROP342=null;
        Token KW_CASCADE343=null;
        Token KW_READONLY344=null;

        CommonTree KW_OFFLINE341_tree=null;
        CommonTree KW_NO_DROP342_tree=null;
        CommonTree KW_CASCADE343_tree=null;
        CommonTree KW_READONLY344_tree=null;
        RewriteRuleTokenStream stream_KW_READONLY=new RewriteRuleTokenStream(adaptor,"token KW_READONLY");
        RewriteRuleTokenStream stream_KW_NO_DROP=new RewriteRuleTokenStream(adaptor,"token KW_NO_DROP");
        RewriteRuleTokenStream stream_KW_CASCADE=new RewriteRuleTokenStream(adaptor,"token KW_CASCADE");
        RewriteRuleTokenStream stream_KW_OFFLINE=new RewriteRuleTokenStream(adaptor,"token KW_OFFLINE");

         pushMsg("protect mode specification enable", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1259:5: ( KW_OFFLINE -> ^( TOK_OFFLINE ) | KW_NO_DROP ( KW_CASCADE )? -> ^( TOK_NO_DROP ( KW_CASCADE )? ) | KW_READONLY -> ^( TOK_READONLY ) )
            int alt90=3;
            switch ( input.LA(1) ) {
            case KW_OFFLINE:
                {
                alt90=1;
                }
                break;
            case KW_NO_DROP:
                {
                alt90=2;
                }
                break;
            case KW_READONLY:
                {
                alt90=3;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 90, 0, input);

                throw nvae;

            }

            switch (alt90) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1259:7: KW_OFFLINE
                    {
                    KW_OFFLINE341=(Token)match(input,KW_OFFLINE,FOLLOW_KW_OFFLINE_in_alterProtectModeMode5884);  
                    stream_KW_OFFLINE.add(KW_OFFLINE341);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1259:19: -> ^( TOK_OFFLINE )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1259:22: ^( TOK_OFFLINE )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_OFFLINE, "TOK_OFFLINE")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1260:7: KW_NO_DROP ( KW_CASCADE )?
                    {
                    KW_NO_DROP342=(Token)match(input,KW_NO_DROP,FOLLOW_KW_NO_DROP_in_alterProtectModeMode5899);  
                    stream_KW_NO_DROP.add(KW_NO_DROP342);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1260:18: ( KW_CASCADE )?
                    int alt89=2;
                    switch ( input.LA(1) ) {
                        case KW_CASCADE:
                            {
                            alt89=1;
                            }
                            break;
                    }

                    switch (alt89) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1260:18: KW_CASCADE
                            {
                            KW_CASCADE343=(Token)match(input,KW_CASCADE,FOLLOW_KW_CASCADE_in_alterProtectModeMode5901);  
                            stream_KW_CASCADE.add(KW_CASCADE343);


                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: KW_CASCADE
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1260:30: -> ^( TOK_NO_DROP ( KW_CASCADE )? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1260:33: ^( TOK_NO_DROP ( KW_CASCADE )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_NO_DROP, "TOK_NO_DROP")
                        , root_1);

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1260:47: ( KW_CASCADE )?
                        if ( stream_KW_CASCADE.hasNext() ) {
                            adaptor.addChild(root_1, 
                            stream_KW_CASCADE.nextNode()
                            );

                        }
                        stream_KW_CASCADE.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1261:7: KW_READONLY
                    {
                    KW_READONLY344=(Token)match(input,KW_READONLY,FOLLOW_KW_READONLY_in_alterProtectModeMode5919);  
                    stream_KW_READONLY.add(KW_READONLY344);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1261:20: -> ^( TOK_READONLY )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1261:23: ^( TOK_READONLY )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_READONLY, "TOK_READONLY")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterProtectModeMode"


    public static class alterStatementSuffixBucketNum_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixBucketNum"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1264:1: alterStatementSuffixBucketNum : KW_INTO num= Number KW_BUCKETS -> ^( TOK_ALTERTABLE_BUCKETS $num) ;
    public final HiveParser.alterStatementSuffixBucketNum_return alterStatementSuffixBucketNum() throws RecognitionException {
        HiveParser.alterStatementSuffixBucketNum_return retval = new HiveParser.alterStatementSuffixBucketNum_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token num=null;
        Token KW_INTO345=null;
        Token KW_BUCKETS346=null;

        CommonTree num_tree=null;
        CommonTree KW_INTO345_tree=null;
        CommonTree KW_BUCKETS346_tree=null;
        RewriteRuleTokenStream stream_KW_INTO=new RewriteRuleTokenStream(adaptor,"token KW_INTO");
        RewriteRuleTokenStream stream_Number=new RewriteRuleTokenStream(adaptor,"token Number");
        RewriteRuleTokenStream stream_KW_BUCKETS=new RewriteRuleTokenStream(adaptor,"token KW_BUCKETS");

         pushMsg("", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1267:5: ( KW_INTO num= Number KW_BUCKETS -> ^( TOK_ALTERTABLE_BUCKETS $num) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1267:7: KW_INTO num= Number KW_BUCKETS
            {
            KW_INTO345=(Token)match(input,KW_INTO,FOLLOW_KW_INTO_in_alterStatementSuffixBucketNum5953);  
            stream_KW_INTO.add(KW_INTO345);


            num=(Token)match(input,Number,FOLLOW_Number_in_alterStatementSuffixBucketNum5957);  
            stream_Number.add(num);


            KW_BUCKETS346=(Token)match(input,KW_BUCKETS,FOLLOW_KW_BUCKETS_in_alterStatementSuffixBucketNum5959);  
            stream_KW_BUCKETS.add(KW_BUCKETS346);


            // AST REWRITE
            // elements: num
            // token labels: num
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_num=new RewriteRuleTokenStream(adaptor,"token num",num);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1268:5: -> ^( TOK_ALTERTABLE_BUCKETS $num)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1268:8: ^( TOK_ALTERTABLE_BUCKETS $num)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_BUCKETS, "TOK_ALTERTABLE_BUCKETS")
                , root_1);

                adaptor.addChild(root_1, stream_num.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixBucketNum"


    public static class alterStatementSuffixCompact_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "alterStatementSuffixCompact"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1271:1: alterStatementSuffixCompact : KW_COMPACT compactType= StringLiteral -> ^( TOK_ALTERTABLE_COMPACT $compactType) ;
    public final HiveParser.alterStatementSuffixCompact_return alterStatementSuffixCompact() throws RecognitionException {
        HiveParser.alterStatementSuffixCompact_return retval = new HiveParser.alterStatementSuffixCompact_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token compactType=null;
        Token KW_COMPACT347=null;

        CommonTree compactType_tree=null;
        CommonTree KW_COMPACT347_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_COMPACT=new RewriteRuleTokenStream(adaptor,"token KW_COMPACT");

         msgs.push("compaction request"); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1274:5: ( KW_COMPACT compactType= StringLiteral -> ^( TOK_ALTERTABLE_COMPACT $compactType) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1274:7: KW_COMPACT compactType= StringLiteral
            {
            KW_COMPACT347=(Token)match(input,KW_COMPACT,FOLLOW_KW_COMPACT_in_alterStatementSuffixCompact5999);  
            stream_KW_COMPACT.add(KW_COMPACT347);


            compactType=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_alterStatementSuffixCompact6003);  
            stream_StringLiteral.add(compactType);


            // AST REWRITE
            // elements: compactType
            // token labels: compactType
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_compactType=new RewriteRuleTokenStream(adaptor,"token compactType",compactType);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1275:5: -> ^( TOK_ALTERTABLE_COMPACT $compactType)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1275:8: ^( TOK_ALTERTABLE_COMPACT $compactType)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_COMPACT, "TOK_ALTERTABLE_COMPACT")
                , root_1);

                adaptor.addChild(root_1, stream_compactType.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             msgs.pop(); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "alterStatementSuffixCompact"


    public static class fileFormat_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "fileFormat"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1279:1: fileFormat : ( KW_INPUTFORMAT inFmt= StringLiteral KW_OUTPUTFORMAT outFmt= StringLiteral KW_SERDE serdeCls= StringLiteral ( KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral )? -> ^( TOK_TABLEFILEFORMAT $inFmt $outFmt $serdeCls ( $inDriver)? ( $outDriver)? ) |genericSpec= identifier -> ^( TOK_FILEFORMAT_GENERIC $genericSpec) );
    public final HiveParser.fileFormat_return fileFormat() throws RecognitionException {
        HiveParser.fileFormat_return retval = new HiveParser.fileFormat_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token inFmt=null;
        Token outFmt=null;
        Token serdeCls=null;
        Token inDriver=null;
        Token outDriver=null;
        Token KW_INPUTFORMAT348=null;
        Token KW_OUTPUTFORMAT349=null;
        Token KW_SERDE350=null;
        Token KW_INPUTDRIVER351=null;
        Token KW_OUTPUTDRIVER352=null;
        HiveParser_IdentifiersParser.identifier_return genericSpec =null;


        CommonTree inFmt_tree=null;
        CommonTree outFmt_tree=null;
        CommonTree serdeCls_tree=null;
        CommonTree inDriver_tree=null;
        CommonTree outDriver_tree=null;
        CommonTree KW_INPUTFORMAT348_tree=null;
        CommonTree KW_OUTPUTFORMAT349_tree=null;
        CommonTree KW_SERDE350_tree=null;
        CommonTree KW_INPUTDRIVER351_tree=null;
        CommonTree KW_OUTPUTDRIVER352_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_INPUTDRIVER=new RewriteRuleTokenStream(adaptor,"token KW_INPUTDRIVER");
        RewriteRuleTokenStream stream_KW_INPUTFORMAT=new RewriteRuleTokenStream(adaptor,"token KW_INPUTFORMAT");
        RewriteRuleTokenStream stream_KW_OUTPUTFORMAT=new RewriteRuleTokenStream(adaptor,"token KW_OUTPUTFORMAT");
        RewriteRuleTokenStream stream_KW_OUTPUTDRIVER=new RewriteRuleTokenStream(adaptor,"token KW_OUTPUTDRIVER");
        RewriteRuleTokenStream stream_KW_SERDE=new RewriteRuleTokenStream(adaptor,"token KW_SERDE");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("file format specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1282:5: ( KW_INPUTFORMAT inFmt= StringLiteral KW_OUTPUTFORMAT outFmt= StringLiteral KW_SERDE serdeCls= StringLiteral ( KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral )? -> ^( TOK_TABLEFILEFORMAT $inFmt $outFmt $serdeCls ( $inDriver)? ( $outDriver)? ) |genericSpec= identifier -> ^( TOK_FILEFORMAT_GENERIC $genericSpec) )
            int alt92=2;
            switch ( input.LA(1) ) {
            case KW_INPUTFORMAT:
                {
                switch ( input.LA(2) ) {
                case StringLiteral:
                    {
                    alt92=1;
                    }
                    break;
                case EOF:
                    {
                    alt92=2;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 92, 1, input);

                    throw nvae;

                }

                }
                break;
            case Identifier:
            case KW_ADD:
            case KW_ADMIN:
            case KW_AFTER:
            case KW_ALL:
            case KW_ALTER:
            case KW_ANALYZE:
            case KW_ARCHIVE:
            case KW_ARRAY:
            case KW_AS:
            case KW_ASC:
            case KW_AUTHORIZATION:
            case KW_BEFORE:
            case KW_BETWEEN:
            case KW_BIGINT:
            case KW_BINARY:
            case KW_BOOLEAN:
            case KW_BOTH:
            case KW_BUCKET:
            case KW_BUCKETS:
            case KW_BY:
            case KW_CASCADE:
            case KW_CHANGE:
            case KW_CLUSTER:
            case KW_CLUSTERED:
            case KW_CLUSTERSTATUS:
            case KW_COLLECTION:
            case KW_COLUMNS:
            case KW_COMMENT:
            case KW_COMPACT:
            case KW_COMPACTIONS:
            case KW_COMPUTE:
            case KW_CONCATENATE:
            case KW_CONTINUE:
            case KW_CREATE:
            case KW_CUBE:
            case KW_CURSOR:
            case KW_DATA:
            case KW_DATABASES:
            case KW_DATE:
            case KW_DATETIME:
            case KW_DBPROPERTIES:
            case KW_DECIMAL:
            case KW_DEFAULT:
            case KW_DEFERRED:
            case KW_DEFINED:
            case KW_DELETE:
            case KW_DELIMITED:
            case KW_DEPENDENCY:
            case KW_DESC:
            case KW_DESCRIBE:
            case KW_DIRECTORIES:
            case KW_DIRECTORY:
            case KW_DISABLE:
            case KW_DISTRIBUTE:
            case KW_DOUBLE:
            case KW_DROP:
            case KW_ELEM_TYPE:
            case KW_ENABLE:
            case KW_ESCAPED:
            case KW_EXCLUSIVE:
            case KW_EXISTS:
            case KW_EXPLAIN:
            case KW_EXPORT:
            case KW_EXTERNAL:
            case KW_FALSE:
            case KW_FETCH:
            case KW_FIELDS:
            case KW_FILE:
            case KW_FILEFORMAT:
            case KW_FIRST:
            case KW_FLOAT:
            case KW_FOR:
            case KW_FORMAT:
            case KW_FORMATTED:
            case KW_FULL:
            case KW_FUNCTIONS:
            case KW_GRANT:
            case KW_GROUP:
            case KW_GROUPING:
            case KW_HOLD_DDLTIME:
            case KW_IDXPROPERTIES:
            case KW_IGNORE:
            case KW_IMPORT:
            case KW_IN:
            case KW_INDEX:
            case KW_INDEXES:
            case KW_INNER:
            case KW_INPATH:
            case KW_INPUTDRIVER:
            case KW_INSERT:
            case KW_INT:
            case KW_INTERSECT:
            case KW_INTO:
            case KW_IS:
            case KW_ITEMS:
            case KW_JAR:
            case KW_KEYS:
            case KW_KEY_TYPE:
            case KW_LATERAL:
            case KW_LEFT:
            case KW_LIKE:
            case KW_LIMIT:
            case KW_LINES:
            case KW_LOAD:
            case KW_LOCAL:
            case KW_LOCATION:
            case KW_LOCK:
            case KW_LOCKS:
            case KW_LOGICAL:
            case KW_LONG:
            case KW_MAPJOIN:
            case KW_MATERIALIZED:
            case KW_MINUS:
            case KW_MSCK:
            case KW_NONE:
            case KW_NOSCAN:
            case KW_NO_DROP:
            case KW_NULL:
            case KW_OF:
            case KW_OFFLINE:
            case KW_OPTION:
            case KW_ORDER:
            case KW_OUT:
            case KW_OUTER:
            case KW_OUTPUTDRIVER:
            case KW_OUTPUTFORMAT:
            case KW_OVERWRITE:
            case KW_OWNER:
            case KW_PARTITION:
            case KW_PARTITIONED:
            case KW_PARTITIONS:
            case KW_PERCENT:
            case KW_PLUS:
            case KW_PRETTY:
            case KW_PRINCIPALS:
            case KW_PROCEDURE:
            case KW_PROTECTION:
            case KW_PURGE:
            case KW_RANGE:
            case KW_READ:
            case KW_READONLY:
            case KW_READS:
            case KW_REBUILD:
            case KW_RECORDREADER:
            case KW_RECORDWRITER:
            case KW_REGEXP:
            case KW_RENAME:
            case KW_REPAIR:
            case KW_REPLACE:
            case KW_RESTRICT:
            case KW_REVOKE:
            case KW_REWRITE:
            case KW_RIGHT:
            case KW_RLIKE:
            case KW_ROLE:
            case KW_ROLES:
            case KW_ROLLUP:
            case KW_ROW:
            case KW_ROWS:
            case KW_SCHEMA:
            case KW_SCHEMAS:
            case KW_SEMI:
            case KW_SERDE:
            case KW_SERDEPROPERTIES:
            case KW_SET:
            case KW_SETS:
            case KW_SHARED:
            case KW_SHOW:
            case KW_SHOW_DATABASE:
            case KW_SKEWED:
            case KW_SMALLINT:
            case KW_SORT:
            case KW_SORTED:
            case KW_SSL:
            case KW_STATISTICS:
            case KW_STORED:
            case KW_STREAMTABLE:
            case KW_STRING:
            case KW_STRUCT:
            case KW_TABLE:
            case KW_TABLES:
            case KW_TBLPROPERTIES:
            case KW_TEMPORARY:
            case KW_TERMINATED:
            case KW_TIMESTAMP:
            case KW_TINYINT:
            case KW_TO:
            case KW_TOUCH:
            case KW_TRANSACTIONS:
            case KW_TRIGGER:
            case KW_TRUE:
            case KW_TRUNCATE:
            case KW_UNARCHIVE:
            case KW_UNDO:
            case KW_UNION:
            case KW_UNIONTYPE:
            case KW_UNLOCK:
            case KW_UNSET:
            case KW_UNSIGNED:
            case KW_UPDATE:
            case KW_USE:
            case KW_USER:
            case KW_USING:
            case KW_UTC:
            case KW_UTCTIMESTAMP:
            case KW_VALUES:
            case KW_VALUE_TYPE:
            case KW_VIEW:
            case KW_WHILE:
            case KW_WITH:
                {
                alt92=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 92, 0, input);

                throw nvae;

            }

            switch (alt92) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1282:7: KW_INPUTFORMAT inFmt= StringLiteral KW_OUTPUTFORMAT outFmt= StringLiteral KW_SERDE serdeCls= StringLiteral ( KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral )?
                    {
                    KW_INPUTFORMAT348=(Token)match(input,KW_INPUTFORMAT,FOLLOW_KW_INPUTFORMAT_in_fileFormat6044);  
                    stream_KW_INPUTFORMAT.add(KW_INPUTFORMAT348);


                    inFmt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_fileFormat6048);  
                    stream_StringLiteral.add(inFmt);


                    KW_OUTPUTFORMAT349=(Token)match(input,KW_OUTPUTFORMAT,FOLLOW_KW_OUTPUTFORMAT_in_fileFormat6050);  
                    stream_KW_OUTPUTFORMAT.add(KW_OUTPUTFORMAT349);


                    outFmt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_fileFormat6054);  
                    stream_StringLiteral.add(outFmt);


                    KW_SERDE350=(Token)match(input,KW_SERDE,FOLLOW_KW_SERDE_in_fileFormat6056);  
                    stream_KW_SERDE.add(KW_SERDE350);


                    serdeCls=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_fileFormat6060);  
                    stream_StringLiteral.add(serdeCls);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1282:111: ( KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral )?
                    int alt91=2;
                    switch ( input.LA(1) ) {
                        case KW_INPUTDRIVER:
                            {
                            alt91=1;
                            }
                            break;
                    }

                    switch (alt91) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1282:112: KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral
                            {
                            KW_INPUTDRIVER351=(Token)match(input,KW_INPUTDRIVER,FOLLOW_KW_INPUTDRIVER_in_fileFormat6063);  
                            stream_KW_INPUTDRIVER.add(KW_INPUTDRIVER351);


                            inDriver=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_fileFormat6067);  
                            stream_StringLiteral.add(inDriver);


                            KW_OUTPUTDRIVER352=(Token)match(input,KW_OUTPUTDRIVER,FOLLOW_KW_OUTPUTDRIVER_in_fileFormat6069);  
                            stream_KW_OUTPUTDRIVER.add(KW_OUTPUTDRIVER352);


                            outDriver=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_fileFormat6073);  
                            stream_StringLiteral.add(outDriver);


                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: outFmt, outDriver, inFmt, inDriver, serdeCls
                    // token labels: outDriver, outFmt, inDriver, serdeCls, inFmt
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleTokenStream stream_outDriver=new RewriteRuleTokenStream(adaptor,"token outDriver",outDriver);
                    RewriteRuleTokenStream stream_outFmt=new RewriteRuleTokenStream(adaptor,"token outFmt",outFmt);
                    RewriteRuleTokenStream stream_inDriver=new RewriteRuleTokenStream(adaptor,"token inDriver",inDriver);
                    RewriteRuleTokenStream stream_serdeCls=new RewriteRuleTokenStream(adaptor,"token serdeCls",serdeCls);
                    RewriteRuleTokenStream stream_inFmt=new RewriteRuleTokenStream(adaptor,"token inFmt",inFmt);
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1283:7: -> ^( TOK_TABLEFILEFORMAT $inFmt $outFmt $serdeCls ( $inDriver)? ( $outDriver)? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1283:10: ^( TOK_TABLEFILEFORMAT $inFmt $outFmt $serdeCls ( $inDriver)? ( $outDriver)? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_TABLEFILEFORMAT, "TOK_TABLEFILEFORMAT")
                        , root_1);

                        adaptor.addChild(root_1, stream_inFmt.nextNode());

                        adaptor.addChild(root_1, stream_outFmt.nextNode());

                        adaptor.addChild(root_1, stream_serdeCls.nextNode());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1283:58: ( $inDriver)?
                        if ( stream_inDriver.hasNext() ) {
                            adaptor.addChild(root_1, stream_inDriver.nextNode());

                        }
                        stream_inDriver.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1283:69: ( $outDriver)?
                        if ( stream_outDriver.hasNext() ) {
                            adaptor.addChild(root_1, stream_outDriver.nextNode());

                        }
                        stream_outDriver.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1284:7: genericSpec= identifier
                    {
                    pushFollow(FOLLOW_identifier_in_fileFormat6114);
                    genericSpec=identifier();

                    state._fsp--;

                    stream_identifier.add(genericSpec.getTree());

                    // AST REWRITE
                    // elements: genericSpec
                    // token labels: 
                    // rule labels: retval, genericSpec
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    RewriteRuleSubtreeStream stream_genericSpec=new RewriteRuleSubtreeStream(adaptor,"rule genericSpec",genericSpec!=null?genericSpec.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1284:30: -> ^( TOK_FILEFORMAT_GENERIC $genericSpec)
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1284:33: ^( TOK_FILEFORMAT_GENERIC $genericSpec)
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_FILEFORMAT_GENERIC, "TOK_FILEFORMAT_GENERIC")
                        , root_1);

                        adaptor.addChild(root_1, stream_genericSpec.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "fileFormat"


    public static class tabTypeExpr_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tabTypeExpr"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1287:1: tabTypeExpr : identifier ( DOT ^ ( KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE | identifier ) )* ;
    public final HiveParser.tabTypeExpr_return tabTypeExpr() throws RecognitionException {
        HiveParser.tabTypeExpr_return retval = new HiveParser.tabTypeExpr_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token DOT354=null;
        Token KW_ELEM_TYPE355=null;
        Token KW_KEY_TYPE356=null;
        Token KW_VALUE_TYPE357=null;
        HiveParser_IdentifiersParser.identifier_return identifier353 =null;

        HiveParser_IdentifiersParser.identifier_return identifier358 =null;


        CommonTree DOT354_tree=null;
        CommonTree KW_ELEM_TYPE355_tree=null;
        CommonTree KW_KEY_TYPE356_tree=null;
        CommonTree KW_VALUE_TYPE357_tree=null;

         pushMsg("specifying table types", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1291:4: ( identifier ( DOT ^ ( KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE | identifier ) )* )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1291:6: identifier ( DOT ^ ( KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE | identifier ) )*
            {
            root_0 = (CommonTree)adaptor.nil();


            pushFollow(FOLLOW_identifier_in_tabTypeExpr6150);
            identifier353=identifier();

            state._fsp--;

            adaptor.addChild(root_0, identifier353.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1291:17: ( DOT ^ ( KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE | identifier ) )*
            loop94:
            do {
                int alt94=2;
                switch ( input.LA(1) ) {
                case DOT:
                    {
                    alt94=1;
                    }
                    break;

                }

                switch (alt94) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1291:18: DOT ^ ( KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE | identifier )
            	    {
            	    DOT354=(Token)match(input,DOT,FOLLOW_DOT_in_tabTypeExpr6153); 
                    DOT354_tree = (CommonTree)adaptor.create(DOT354);
            	    root_0 = (CommonTree)adaptor.becomeRoot(DOT354_tree, root_0);


            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1291:23: ( KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE | identifier )
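                    // Besides the three pseudo-element keywords, most Hive keywords are
                    // non-reserved and may appear here as plain identifiers, which is why the
                    // alternative list below enumerates nearly every KW_* token and routes it to
                    // the identifier branch (alt 4).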
            	    int alt93=4;
            	    switch ( input.LA(1) ) {
            	    case KW_ELEM_TYPE:
            	        {
            	        alt93=1;
            	        }
            	        break;
            	    case KW_KEY_TYPE:
            	        {
            	        alt93=2;
            	        }
            	        break;
            	    case KW_VALUE_TYPE:
            	        {
            	        alt93=3;
            	        }
            	        break;
            	    case Identifier:
            	    case KW_ADD:
            	    case KW_ADMIN:
            	    case KW_AFTER:
            	    case KW_ALL:
            	    case KW_ALTER:
            	    case KW_ANALYZE:
            	    case KW_ARCHIVE:
            	    case KW_ARRAY:
            	    case KW_AS:
            	    case KW_ASC:
            	    case KW_AUTHORIZATION:
            	    case KW_BEFORE:
            	    case KW_BETWEEN:
            	    case KW_BIGINT:
            	    case KW_BINARY:
            	    case KW_BOOLEAN:
            	    case KW_BOTH:
            	    case KW_BUCKET:
            	    case KW_BUCKETS:
            	    case KW_BY:
            	    case KW_CASCADE:
            	    case KW_CHANGE:
            	    case KW_CLUSTER:
            	    case KW_CLUSTERED:
            	    case KW_CLUSTERSTATUS:
            	    case KW_COLLECTION:
            	    case KW_COLUMNS:
            	    case KW_COMMENT:
            	    case KW_COMPACT:
            	    case KW_COMPACTIONS:
            	    case KW_COMPUTE:
            	    case KW_CONCATENATE:
            	    case KW_CONTINUE:
            	    case KW_CREATE:
            	    case KW_CUBE:
            	    case KW_CURSOR:
            	    case KW_DATA:
            	    case KW_DATABASES:
            	    case KW_DATE:
            	    case KW_DATETIME:
            	    case KW_DBPROPERTIES:
            	    case KW_DECIMAL:
            	    case KW_DEFAULT:
            	    case KW_DEFERRED:
            	    case KW_DEFINED:
            	    case KW_DELETE:
            	    case KW_DELIMITED:
            	    case KW_DEPENDENCY:
            	    case KW_DESC:
            	    case KW_DESCRIBE:
            	    case KW_DIRECTORIES:
            	    case KW_DIRECTORY:
            	    case KW_DISABLE:
            	    case KW_DISTRIBUTE:
            	    case KW_DOUBLE:
            	    case KW_DROP:
            	    case KW_ENABLE:
            	    case KW_ESCAPED:
            	    case KW_EXCLUSIVE:
            	    case KW_EXISTS:
            	    case KW_EXPLAIN:
            	    case KW_EXPORT:
            	    case KW_EXTERNAL:
            	    case KW_FALSE:
            	    case KW_FETCH:
            	    case KW_FIELDS:
            	    case KW_FILE:
            	    case KW_FILEFORMAT:
            	    case KW_FIRST:
            	    case KW_FLOAT:
            	    case KW_FOR:
            	    case KW_FORMAT:
            	    case KW_FORMATTED:
            	    case KW_FULL:
            	    case KW_FUNCTIONS:
            	    case KW_GRANT:
            	    case KW_GROUP:
            	    case KW_GROUPING:
            	    case KW_HOLD_DDLTIME:
            	    case KW_IDXPROPERTIES:
            	    case KW_IGNORE:
            	    case KW_IMPORT:
            	    case KW_IN:
            	    case KW_INDEX:
            	    case KW_INDEXES:
            	    case KW_INNER:
            	    case KW_INPATH:
            	    case KW_INPUTDRIVER:
            	    case KW_INPUTFORMAT:
            	    case KW_INSERT:
            	    case KW_INT:
            	    case KW_INTERSECT:
            	    case KW_INTO:
            	    case KW_IS:
            	    case KW_ITEMS:
            	    case KW_JAR:
            	    case KW_KEYS:
            	    case KW_LATERAL:
            	    case KW_LEFT:
            	    case KW_LIKE:
            	    case KW_LIMIT:
            	    case KW_LINES:
            	    case KW_LOAD:
            	    case KW_LOCAL:
            	    case KW_LOCATION:
            	    case KW_LOCK:
            	    case KW_LOCKS:
            	    case KW_LOGICAL:
            	    case KW_LONG:
            	    case KW_MAPJOIN:
            	    case KW_MATERIALIZED:
            	    case KW_MINUS:
            	    case KW_MSCK:
            	    case KW_NONE:
            	    case KW_NOSCAN:
            	    case KW_NO_DROP:
            	    case KW_NULL:
            	    case KW_OF:
            	    case KW_OFFLINE:
            	    case KW_OPTION:
            	    case KW_ORDER:
            	    case KW_OUT:
            	    case KW_OUTER:
            	    case KW_OUTPUTDRIVER:
            	    case KW_OUTPUTFORMAT:
            	    case KW_OVERWRITE:
            	    case KW_OWNER:
            	    case KW_PARTITION:
            	    case KW_PARTITIONED:
            	    case KW_PARTITIONS:
            	    case KW_PERCENT:
            	    case KW_PLUS:
            	    case KW_PRETTY:
            	    case KW_PRINCIPALS:
            	    case KW_PROCEDURE:
            	    case KW_PROTECTION:
            	    case KW_PURGE:
            	    case KW_RANGE:
            	    case KW_READ:
            	    case KW_READONLY:
            	    case KW_READS:
            	    case KW_REBUILD:
            	    case KW_RECORDREADER:
            	    case KW_RECORDWRITER:
            	    case KW_REGEXP:
            	    case KW_RENAME:
            	    case KW_REPAIR:
            	    case KW_REPLACE:
            	    case KW_RESTRICT:
            	    case KW_REVOKE:
            	    case KW_REWRITE:
            	    case KW_RIGHT:
            	    case KW_RLIKE:
            	    case KW_ROLE:
            	    case KW_ROLES:
            	    case KW_ROLLUP:
            	    case KW_ROW:
            	    case KW_ROWS:
            	    case KW_SCHEMA:
            	    case KW_SCHEMAS:
            	    case KW_SEMI:
            	    case KW_SERDE:
            	    case KW_SERDEPROPERTIES:
            	    case KW_SET:
            	    case KW_SETS:
            	    case KW_SHARED:
            	    case KW_SHOW:
            	    case KW_SHOW_DATABASE:
            	    case KW_SKEWED:
            	    case KW_SMALLINT:
            	    case KW_SORT:
            	    case KW_SORTED:
            	    case KW_SSL:
            	    case KW_STATISTICS:
            	    case KW_STORED:
            	    case KW_STREAMTABLE:
            	    case KW_STRING:
            	    case KW_STRUCT:
            	    case KW_TABLE:
            	    case KW_TABLES:
            	    case KW_TBLPROPERTIES:
            	    case KW_TEMPORARY:
            	    case KW_TERMINATED:
            	    case KW_TIMESTAMP:
            	    case KW_TINYINT:
            	    case KW_TO:
            	    case KW_TOUCH:
            	    case KW_TRANSACTIONS:
            	    case KW_TRIGGER:
            	    case KW_TRUE:
            	    case KW_TRUNCATE:
            	    case KW_UNARCHIVE:
            	    case KW_UNDO:
            	    case KW_UNION:
            	    case KW_UNIONTYPE:
            	    case KW_UNLOCK:
            	    case KW_UNSET:
            	    case KW_UNSIGNED:
            	    case KW_UPDATE:
            	    case KW_USE:
            	    case KW_USER:
            	    case KW_USING:
            	    case KW_UTC:
            	    case KW_UTCTIMESTAMP:
            	    case KW_VALUES:
            	    case KW_VIEW:
            	    case KW_WHILE:
            	    case KW_WITH:
            	        {
            	        alt93=4;
            	        }
            	        break;
            	    default:
            	        NoViableAltException nvae =
            	            new NoViableAltException("", 93, 0, input);

            	        throw nvae;

            	    }

            	    switch (alt93) {
            	        case 1 :
            	            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1291:24: KW_ELEM_TYPE
            	            {
            	            KW_ELEM_TYPE355=(Token)match(input,KW_ELEM_TYPE,FOLLOW_KW_ELEM_TYPE_in_tabTypeExpr6157); 
                            KW_ELEM_TYPE355_tree = (CommonTree)adaptor.create(KW_ELEM_TYPE355);
            	            adaptor.addChild(root_0, KW_ELEM_TYPE355_tree);


            	            }
            	            break;
            	        case 2 :
            	            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1291:39: KW_KEY_TYPE
            	            {
            	            KW_KEY_TYPE356=(Token)match(input,KW_KEY_TYPE,FOLLOW_KW_KEY_TYPE_in_tabTypeExpr6161); 
                            KW_KEY_TYPE356_tree = (CommonTree)adaptor.create(KW_KEY_TYPE356);
            	            adaptor.addChild(root_0, KW_KEY_TYPE356_tree);


            	            }
            	            break;
            	        case 3 :
            	            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1291:53: KW_VALUE_TYPE
            	            {
            	            KW_VALUE_TYPE357=(Token)match(input,KW_VALUE_TYPE,FOLLOW_KW_VALUE_TYPE_in_tabTypeExpr6165); 
                            KW_VALUE_TYPE357_tree = (CommonTree)adaptor.create(KW_VALUE_TYPE357);
            	            adaptor.addChild(root_0, KW_VALUE_TYPE357_tree);


            	            }
            	            break;
            	        case 4 :
            	            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1291:69: identifier
            	            {
            	            pushFollow(FOLLOW_identifier_in_tabTypeExpr6169);
            	            identifier358=identifier();

            	            state._fsp--;

            	            adaptor.addChild(root_0, identifier358.getTree());

            	            }
            	            break;

            	    }


            	    }
            	    break;

            	default :
            	    break loop94;
                }
            } while (true);


            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup that must always run before leaving the rule (nothing to do here)
        }
        return retval;
    }
    // $ANTLR end "tabTypeExpr"


    public static class descTabTypeExpr_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "descTabTypeExpr"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1294:1: descTabTypeExpr : identifier ( DOT ^ ( KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE | identifier ) )* ( identifier )? ;
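    // descTabTypeExpr is the DESCRIBE-specific variant of tabTypeExpr: the same dotted chain of
    // identifiers and pseudo-element keywords, optionally followed by one more bare identifier
    // (e.g. a trailing column name, as in DESCRIBE some_db.some_table some_column; the names are
    // illustrative).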
    public final HiveParser.descTabTypeExpr_return descTabTypeExpr() throws RecognitionException {
        HiveParser.descTabTypeExpr_return retval = new HiveParser.descTabTypeExpr_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token DOT360=null;
        Token KW_ELEM_TYPE361=null;
        Token KW_KEY_TYPE362=null;
        Token KW_VALUE_TYPE363=null;
        HiveParser_IdentifiersParser.identifier_return identifier359 =null;

        HiveParser_IdentifiersParser.identifier_return identifier364 =null;

        HiveParser_IdentifiersParser.identifier_return identifier365 =null;


        CommonTree DOT360_tree=null;
        CommonTree KW_ELEM_TYPE361_tree=null;
        CommonTree KW_KEY_TYPE362_tree=null;
        CommonTree KW_VALUE_TYPE363_tree=null;

         pushMsg("specifying describe table types", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1298:4: ( identifier ( DOT ^ ( KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE | identifier ) )* ( identifier )? )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1298:6: identifier ( DOT ^ ( KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE | identifier ) )* ( identifier )?
            {
            root_0 = (CommonTree)adaptor.nil();


            pushFollow(FOLLOW_identifier_in_descTabTypeExpr6198);
            identifier359=identifier();

            state._fsp--;

            adaptor.addChild(root_0, identifier359.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1298:17: ( DOT ^ ( KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE | identifier ) )*
            loop96:
            do {
                int alt96=2;
                switch ( input.LA(1) ) {
                case DOT:
                    {
                    alt96=1;
                    }
                    break;

                }

                switch (alt96) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1298:18: DOT ^ ( KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE | identifier )
            	    {
            	    DOT360=(Token)match(input,DOT,FOLLOW_DOT_in_descTabTypeExpr6201); 
                    DOT360_tree = (CommonTree)adaptor.create(DOT360);
            	    root_0 = (CommonTree)adaptor.becomeRoot(DOT360_tree, root_0);


            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1298:23: ( KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE | identifier )
            	    int alt95=4;
            	    switch ( input.LA(1) ) {
            	    case KW_ELEM_TYPE:
            	        {
            	        alt95=1;
            	        }
            	        break;
            	    case KW_KEY_TYPE:
            	        {
            	        alt95=2;
            	        }
            	        break;
            	    case KW_VALUE_TYPE:
            	        {
            	        alt95=3;
            	        }
            	        break;
            	    case Identifier:
            	    case KW_ADD:
            	    case KW_ADMIN:
            	    case KW_AFTER:
            	    case KW_ALL:
            	    case KW_ALTER:
            	    case KW_ANALYZE:
            	    case KW_ARCHIVE:
            	    case KW_ARRAY:
            	    case KW_AS:
            	    case KW_ASC:
            	    case KW_AUTHORIZATION:
            	    case KW_BEFORE:
            	    case KW_BETWEEN:
            	    case KW_BIGINT:
            	    case KW_BINARY:
            	    case KW_BOOLEAN:
            	    case KW_BOTH:
            	    case KW_BUCKET:
            	    case KW_BUCKETS:
            	    case KW_BY:
            	    case KW_CASCADE:
            	    case KW_CHANGE:
            	    case KW_CLUSTER:
            	    case KW_CLUSTERED:
            	    case KW_CLUSTERSTATUS:
            	    case KW_COLLECTION:
            	    case KW_COLUMNS:
            	    case KW_COMMENT:
            	    case KW_COMPACT:
            	    case KW_COMPACTIONS:
            	    case KW_COMPUTE:
            	    case KW_CONCATENATE:
            	    case KW_CONTINUE:
            	    case KW_CREATE:
            	    case KW_CUBE:
            	    case KW_CURSOR:
            	    case KW_DATA:
            	    case KW_DATABASES:
            	    case KW_DATE:
            	    case KW_DATETIME:
            	    case KW_DBPROPERTIES:
            	    case KW_DECIMAL:
            	    case KW_DEFAULT:
            	    case KW_DEFERRED:
            	    case KW_DEFINED:
            	    case KW_DELETE:
            	    case KW_DELIMITED:
            	    case KW_DEPENDENCY:
            	    case KW_DESC:
            	    case KW_DESCRIBE:
            	    case KW_DIRECTORIES:
            	    case KW_DIRECTORY:
            	    case KW_DISABLE:
            	    case KW_DISTRIBUTE:
            	    case KW_DOUBLE:
            	    case KW_DROP:
            	    case KW_ENABLE:
            	    case KW_ESCAPED:
            	    case KW_EXCLUSIVE:
            	    case KW_EXISTS:
            	    case KW_EXPLAIN:
            	    case KW_EXPORT:
            	    case KW_EXTERNAL:
            	    case KW_FALSE:
            	    case KW_FETCH:
            	    case KW_FIELDS:
            	    case KW_FILE:
            	    case KW_FILEFORMAT:
            	    case KW_FIRST:
            	    case KW_FLOAT:
            	    case KW_FOR:
            	    case KW_FORMAT:
            	    case KW_FORMATTED:
            	    case KW_FULL:
            	    case KW_FUNCTIONS:
            	    case KW_GRANT:
            	    case KW_GROUP:
            	    case KW_GROUPING:
            	    case KW_HOLD_DDLTIME:
            	    case KW_IDXPROPERTIES:
            	    case KW_IGNORE:
            	    case KW_IMPORT:
            	    case KW_IN:
            	    case KW_INDEX:
            	    case KW_INDEXES:
            	    case KW_INNER:
            	    case KW_INPATH:
            	    case KW_INPUTDRIVER:
            	    case KW_INPUTFORMAT:
            	    case KW_INSERT:
            	    case KW_INT:
            	    case KW_INTERSECT:
            	    case KW_INTO:
            	    case KW_IS:
            	    case KW_ITEMS:
            	    case KW_JAR:
            	    case KW_KEYS:
            	    case KW_LATERAL:
            	    case KW_LEFT:
            	    case KW_LIKE:
            	    case KW_LIMIT:
            	    case KW_LINES:
            	    case KW_LOAD:
            	    case KW_LOCAL:
            	    case KW_LOCATION:
            	    case KW_LOCK:
            	    case KW_LOCKS:
            	    case KW_LOGICAL:
            	    case KW_LONG:
            	    case KW_MAPJOIN:
            	    case KW_MATERIALIZED:
            	    case KW_MINUS:
            	    case KW_MSCK:
            	    case KW_NONE:
            	    case KW_NOSCAN:
            	    case KW_NO_DROP:
            	    case KW_NULL:
            	    case KW_OF:
            	    case KW_OFFLINE:
            	    case KW_OPTION:
            	    case KW_ORDER:
            	    case KW_OUT:
            	    case KW_OUTER:
            	    case KW_OUTPUTDRIVER:
            	    case KW_OUTPUTFORMAT:
            	    case KW_OVERWRITE:
            	    case KW_OWNER:
            	    case KW_PARTITION:
            	    case KW_PARTITIONED:
            	    case KW_PARTITIONS:
            	    case KW_PERCENT:
            	    case KW_PLUS:
            	    case KW_PRETTY:
            	    case KW_PRINCIPALS:
            	    case KW_PROCEDURE:
            	    case KW_PROTECTION:
            	    case KW_PURGE:
            	    case KW_RANGE:
            	    case KW_READ:
            	    case KW_READONLY:
            	    case KW_READS:
            	    case KW_REBUILD:
            	    case KW_RECORDREADER:
            	    case KW_RECORDWRITER:
            	    case KW_REGEXP:
            	    case KW_RENAME:
            	    case KW_REPAIR:
            	    case KW_REPLACE:
            	    case KW_RESTRICT:
            	    case KW_REVOKE:
            	    case KW_REWRITE:
            	    case KW_RIGHT:
            	    case KW_RLIKE:
            	    case KW_ROLE:
            	    case KW_ROLES:
            	    case KW_ROLLUP:
            	    case KW_ROW:
            	    case KW_ROWS:
            	    case KW_SCHEMA:
            	    case KW_SCHEMAS:
            	    case KW_SEMI:
            	    case KW_SERDE:
            	    case KW_SERDEPROPERTIES:
            	    case KW_SET:
            	    case KW_SETS:
            	    case KW_SHARED:
            	    case KW_SHOW:
            	    case KW_SHOW_DATABASE:
            	    case KW_SKEWED:
            	    case KW_SMALLINT:
            	    case KW_SORT:
            	    case KW_SORTED:
            	    case KW_SSL:
            	    case KW_STATISTICS:
            	    case KW_STORED:
            	    case KW_STREAMTABLE:
            	    case KW_STRING:
            	    case KW_STRUCT:
            	    case KW_TABLE:
            	    case KW_TABLES:
            	    case KW_TBLPROPERTIES:
            	    case KW_TEMPORARY:
            	    case KW_TERMINATED:
            	    case KW_TIMESTAMP:
            	    case KW_TINYINT:
            	    case KW_TO:
            	    case KW_TOUCH:
            	    case KW_TRANSACTIONS:
            	    case KW_TRIGGER:
            	    case KW_TRUE:
            	    case KW_TRUNCATE:
            	    case KW_UNARCHIVE:
            	    case KW_UNDO:
            	    case KW_UNION:
            	    case KW_UNIONTYPE:
            	    case KW_UNLOCK:
            	    case KW_UNSET:
            	    case KW_UNSIGNED:
            	    case KW_UPDATE:
            	    case KW_USE:
            	    case KW_USER:
            	    case KW_USING:
            	    case KW_UTC:
            	    case KW_UTCTIMESTAMP:
            	    case KW_VALUES:
            	    case KW_VIEW:
            	    case KW_WHILE:
            	    case KW_WITH:
            	        {
            	        alt95=4;
            	        }
            	        break;
            	    default:
            	        NoViableAltException nvae =
            	            new NoViableAltException("", 95, 0, input);

            	        throw nvae;

            	    }

            	    switch (alt95) {
            	        case 1 :
            	            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1298:24: KW_ELEM_TYPE
            	            {
            	            KW_ELEM_TYPE361=(Token)match(input,KW_ELEM_TYPE,FOLLOW_KW_ELEM_TYPE_in_descTabTypeExpr6205); 
                            KW_ELEM_TYPE361_tree = (CommonTree)adaptor.create(KW_ELEM_TYPE361);
            	            adaptor.addChild(root_0, KW_ELEM_TYPE361_tree);


            	            }
            	            break;
            	        case 2 :
            	            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1298:39: KW_KEY_TYPE
            	            {
            	            KW_KEY_TYPE362=(Token)match(input,KW_KEY_TYPE,FOLLOW_KW_KEY_TYPE_in_descTabTypeExpr6209); 
                            KW_KEY_TYPE362_tree = (CommonTree)adaptor.create(KW_KEY_TYPE362);
            	            adaptor.addChild(root_0, KW_KEY_TYPE362_tree);


            	            }
            	            break;
            	        case 3 :
            	            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1298:53: KW_VALUE_TYPE
            	            {
            	            KW_VALUE_TYPE363=(Token)match(input,KW_VALUE_TYPE,FOLLOW_KW_VALUE_TYPE_in_descTabTypeExpr6213); 
                            KW_VALUE_TYPE363_tree = (CommonTree)adaptor.create(KW_VALUE_TYPE363);
            	            adaptor.addChild(root_0, KW_VALUE_TYPE363_tree);


            	            }
            	            break;
            	        case 4 :
            	            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1298:69: identifier
            	            {
            	            pushFollow(FOLLOW_identifier_in_descTabTypeExpr6217);
            	            identifier364=identifier();

            	            state._fsp--;

            	            adaptor.addChild(root_0, identifier364.getTree());

            	            }
            	            break;

            	    }


            	    }
            	    break;

            	default :
            	    break loop96;
                }
            } while (true);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1298:83: ( identifier )?
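            // The optional trailing identifier is taken only when it cannot instead begin a
            // partitionSpec: a KW_PARTITION token is accepted here as an identifier only if the
            // token after it is EOF or another KW_PARTITION (the nested LA(2) check below), so a
            // following PARTITION ( ... ) clause is left for descPartTypeExpr to consume.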
            int alt97=2;
            switch ( input.LA(1) ) {
                case Identifier:
                case KW_ADD:
                case KW_ADMIN:
                case KW_AFTER:
                case KW_ALL:
                case KW_ALTER:
                case KW_ANALYZE:
                case KW_ARCHIVE:
                case KW_ARRAY:
                case KW_AS:
                case KW_ASC:
                case KW_AUTHORIZATION:
                case KW_BEFORE:
                case KW_BETWEEN:
                case KW_BIGINT:
                case KW_BINARY:
                case KW_BOOLEAN:
                case KW_BOTH:
                case KW_BUCKET:
                case KW_BUCKETS:
                case KW_BY:
                case KW_CASCADE:
                case KW_CHANGE:
                case KW_CLUSTER:
                case KW_CLUSTERED:
                case KW_CLUSTERSTATUS:
                case KW_COLLECTION:
                case KW_COLUMNS:
                case KW_COMMENT:
                case KW_COMPACT:
                case KW_COMPACTIONS:
                case KW_COMPUTE:
                case KW_CONCATENATE:
                case KW_CONTINUE:
                case KW_CREATE:
                case KW_CUBE:
                case KW_CURSOR:
                case KW_DATA:
                case KW_DATABASES:
                case KW_DATE:
                case KW_DATETIME:
                case KW_DBPROPERTIES:
                case KW_DECIMAL:
                case KW_DEFAULT:
                case KW_DEFERRED:
                case KW_DEFINED:
                case KW_DELETE:
                case KW_DELIMITED:
                case KW_DEPENDENCY:
                case KW_DESC:
                case KW_DESCRIBE:
                case KW_DIRECTORIES:
                case KW_DIRECTORY:
                case KW_DISABLE:
                case KW_DISTRIBUTE:
                case KW_DOUBLE:
                case KW_DROP:
                case KW_ELEM_TYPE:
                case KW_ENABLE:
                case KW_ESCAPED:
                case KW_EXCLUSIVE:
                case KW_EXISTS:
                case KW_EXPLAIN:
                case KW_EXPORT:
                case KW_EXTERNAL:
                case KW_FALSE:
                case KW_FETCH:
                case KW_FIELDS:
                case KW_FILE:
                case KW_FILEFORMAT:
                case KW_FIRST:
                case KW_FLOAT:
                case KW_FOR:
                case KW_FORMAT:
                case KW_FORMATTED:
                case KW_FULL:
                case KW_FUNCTIONS:
                case KW_GRANT:
                case KW_GROUP:
                case KW_GROUPING:
                case KW_HOLD_DDLTIME:
                case KW_IDXPROPERTIES:
                case KW_IGNORE:
                case KW_IMPORT:
                case KW_IN:
                case KW_INDEX:
                case KW_INDEXES:
                case KW_INNER:
                case KW_INPATH:
                case KW_INPUTDRIVER:
                case KW_INPUTFORMAT:
                case KW_INSERT:
                case KW_INT:
                case KW_INTERSECT:
                case KW_INTO:
                case KW_IS:
                case KW_ITEMS:
                case KW_JAR:
                case KW_KEYS:
                case KW_KEY_TYPE:
                case KW_LATERAL:
                case KW_LEFT:
                case KW_LIKE:
                case KW_LIMIT:
                case KW_LINES:
                case KW_LOAD:
                case KW_LOCAL:
                case KW_LOCATION:
                case KW_LOCK:
                case KW_LOCKS:
                case KW_LOGICAL:
                case KW_LONG:
                case KW_MAPJOIN:
                case KW_MATERIALIZED:
                case KW_MINUS:
                case KW_MSCK:
                case KW_NONE:
                case KW_NOSCAN:
                case KW_NO_DROP:
                case KW_NULL:
                case KW_OF:
                case KW_OFFLINE:
                case KW_OPTION:
                case KW_ORDER:
                case KW_OUT:
                case KW_OUTER:
                case KW_OUTPUTDRIVER:
                case KW_OUTPUTFORMAT:
                case KW_OVERWRITE:
                case KW_OWNER:
                case KW_PARTITIONED:
                case KW_PARTITIONS:
                case KW_PERCENT:
                case KW_PLUS:
                case KW_PRETTY:
                case KW_PRINCIPALS:
                case KW_PROCEDURE:
                case KW_PROTECTION:
                case KW_PURGE:
                case KW_RANGE:
                case KW_READ:
                case KW_READONLY:
                case KW_READS:
                case KW_REBUILD:
                case KW_RECORDREADER:
                case KW_RECORDWRITER:
                case KW_REGEXP:
                case KW_RENAME:
                case KW_REPAIR:
                case KW_REPLACE:
                case KW_RESTRICT:
                case KW_REVOKE:
                case KW_REWRITE:
                case KW_RIGHT:
                case KW_RLIKE:
                case KW_ROLE:
                case KW_ROLES:
                case KW_ROLLUP:
                case KW_ROW:
                case KW_ROWS:
                case KW_SCHEMA:
                case KW_SCHEMAS:
                case KW_SEMI:
                case KW_SERDE:
                case KW_SERDEPROPERTIES:
                case KW_SET:
                case KW_SETS:
                case KW_SHARED:
                case KW_SHOW:
                case KW_SHOW_DATABASE:
                case KW_SKEWED:
                case KW_SMALLINT:
                case KW_SORT:
                case KW_SORTED:
                case KW_SSL:
                case KW_STATISTICS:
                case KW_STORED:
                case KW_STREAMTABLE:
                case KW_STRING:
                case KW_STRUCT:
                case KW_TABLE:
                case KW_TABLES:
                case KW_TBLPROPERTIES:
                case KW_TEMPORARY:
                case KW_TERMINATED:
                case KW_TIMESTAMP:
                case KW_TINYINT:
                case KW_TO:
                case KW_TOUCH:
                case KW_TRANSACTIONS:
                case KW_TRIGGER:
                case KW_TRUE:
                case KW_TRUNCATE:
                case KW_UNARCHIVE:
                case KW_UNDO:
                case KW_UNION:
                case KW_UNIONTYPE:
                case KW_UNLOCK:
                case KW_UNSET:
                case KW_UNSIGNED:
                case KW_UPDATE:
                case KW_USE:
                case KW_USER:
                case KW_USING:
                case KW_UTC:
                case KW_UTCTIMESTAMP:
                case KW_VALUES:
                case KW_VALUE_TYPE:
                case KW_VIEW:
                case KW_WHILE:
                case KW_WITH:
                    {
                    alt97=1;
                    }
                    break;
                case KW_PARTITION:
                    {
                    switch ( input.LA(2) ) {
                        case EOF:
                        case KW_PARTITION:
                            {
                            alt97=1;
                            }
                            break;
                    }

                    }
                    break;
            }

            switch (alt97) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1298:83: identifier
                    {
                    pushFollow(FOLLOW_identifier_in_descTabTypeExpr6222);
                    identifier365=identifier();

                    state._fsp--;

                    adaptor.addChild(root_0, identifier365.getTree());

                    }
                    break;

            }


            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup that must always run before leaving the rule (nothing to do here)
        }
        return retval;
    }
    // $ANTLR end "descTabTypeExpr"


    public static class partTypeExpr_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "partTypeExpr"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1301:1: partTypeExpr : tabTypeExpr ( partitionSpec )? -> ^( TOK_TABTYPE tabTypeExpr ( partitionSpec )? ) ;
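    // partTypeExpr pairs a tabTypeExpr with an optional partitionSpec and rewrites the result
    // under a single TOK_TABTYPE node, so later analysis sees the table-type path and the
    // partition columns as children of one subtree. Illustrative input (names made up):
    // some_table PARTITION (dt='2020-01-01').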
    public final HiveParser.partTypeExpr_return partTypeExpr() throws RecognitionException {
        HiveParser.partTypeExpr_return retval = new HiveParser.partTypeExpr_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.tabTypeExpr_return tabTypeExpr366 =null;

        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec367 =null;


        RewriteRuleSubtreeStream stream_tabTypeExpr=new RewriteRuleSubtreeStream(adaptor,"rule tabTypeExpr");
        RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
         pushMsg("specifying table partitions", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1304:5: ( tabTypeExpr ( partitionSpec )? -> ^( TOK_TABTYPE tabTypeExpr ( partitionSpec )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1304:8: tabTypeExpr ( partitionSpec )?
            {
            pushFollow(FOLLOW_tabTypeExpr_in_partTypeExpr6250);
            tabTypeExpr366=tabTypeExpr();

            state._fsp--;

            stream_tabTypeExpr.add(tabTypeExpr366.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1304:20: ( partitionSpec )?
            int alt98=2;
            switch ( input.LA(1) ) {
                case KW_PARTITION:
                    {
                    alt98=1;
                    }
                    break;
            }

            switch (alt98) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1304:20: partitionSpec
                    {
                    pushFollow(FOLLOW_partitionSpec_in_partTypeExpr6252);
                    partitionSpec367=partitionSpec();

                    state._fsp--;

                    stream_partitionSpec.add(partitionSpec367.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: partitionSpec, tabTypeExpr
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1304:35: -> ^( TOK_TABTYPE tabTypeExpr ( partitionSpec )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1304:38: ^( TOK_TABTYPE tabTypeExpr ( partitionSpec )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABTYPE, "TOK_TABTYPE")
                , root_1);

                adaptor.addChild(root_1, stream_tabTypeExpr.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1304:64: ( partitionSpec )?
                if ( stream_partitionSpec.hasNext() ) {
                    adaptor.addChild(root_1, stream_partitionSpec.nextTree());

                }
                stream_partitionSpec.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup that must always run before leaving the rule (nothing to do here)
        }
        return retval;
    }
    // $ANTLR end "partTypeExpr"


    public static class descPartTypeExpr_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "descPartTypeExpr"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1307:1: descPartTypeExpr : descTabTypeExpr ( partitionSpec )? -> ^( TOK_TABTYPE descTabTypeExpr ( partitionSpec )? ) ;
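    // descPartTypeExpr mirrors partTypeExpr but builds on descTabTypeExpr, so DESCRIBE targets
    // may also carry a trailing column identifier; the rewrite likewise wraps everything under
    // TOK_TABTYPE with the optional partitionSpec subtree as the second child.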
    public final HiveParser.descPartTypeExpr_return descPartTypeExpr() throws RecognitionException {
        HiveParser.descPartTypeExpr_return retval = new HiveParser.descPartTypeExpr_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.descTabTypeExpr_return descTabTypeExpr368 =null;

        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec369 =null;


        RewriteRuleSubtreeStream stream_descTabTypeExpr=new RewriteRuleSubtreeStream(adaptor,"rule descTabTypeExpr");
        RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
         pushMsg("specifying describe table partitions", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1310:5: ( descTabTypeExpr ( partitionSpec )? -> ^( TOK_TABTYPE descTabTypeExpr ( partitionSpec )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1310:8: descTabTypeExpr ( partitionSpec )?
            {
            pushFollow(FOLLOW_descTabTypeExpr_in_descPartTypeExpr6292);
            descTabTypeExpr368=descTabTypeExpr();

            state._fsp--;

            stream_descTabTypeExpr.add(descTabTypeExpr368.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1310:24: ( partitionSpec )?
            int alt99=2;
            switch ( input.LA(1) ) {
                case KW_PARTITION:
                    {
                    alt99=1;
                    }
                    break;
            }

            switch (alt99) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1310:24: partitionSpec
                    {
                    pushFollow(FOLLOW_partitionSpec_in_descPartTypeExpr6294);
                    partitionSpec369=partitionSpec();

                    state._fsp--;

                    stream_partitionSpec.add(partitionSpec369.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: descTabTypeExpr, partitionSpec
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1310:39: -> ^( TOK_TABTYPE descTabTypeExpr ( partitionSpec )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1310:42: ^( TOK_TABTYPE descTabTypeExpr ( partitionSpec )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABTYPE, "TOK_TABTYPE")
                , root_1);

                adaptor.addChild(root_1, stream_descTabTypeExpr.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1310:72: ( partitionSpec )?
                if ( stream_partitionSpec.hasNext() ) {
                    adaptor.addChild(root_1, stream_partitionSpec.nextTree());

                }
                stream_partitionSpec.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // cleanup that must always run before leaving the rule (nothing to do here)
        }
        return retval;
    }
    // $ANTLR end "descPartTypeExpr"


    public static class descStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "descStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1313:1: descStatement : ( ( KW_DESCRIBE | KW_DESC ) (descOptions= KW_FORMATTED |descOptions= KW_EXTENDED |descOptions= KW_PRETTY )? (parttype= descPartTypeExpr ) -> ^( TOK_DESCTABLE $parttype ( $descOptions)? ) | ( KW_DESCRIBE | KW_DESC ) KW_FUNCTION ( KW_EXTENDED )? (name= descFuncNames ) -> ^( TOK_DESCFUNCTION $name ( KW_EXTENDED )? ) | ( KW_DESCRIBE | KW_DESC ) ( KW_DATABASE | KW_SCHEMA ) ( KW_EXTENDED )? (dbName= identifier ) -> ^( TOK_DESCDATABASE $dbName ( KW_EXTENDED )? ) );
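    // descStatement covers the three DESCRIBE/DESC forms listed in the rule above:
    //   1. DESCRIBE [FORMATTED | EXTENDED | PRETTY] descPartTypeExpr  -> ^(TOK_DESCTABLE ...)
    //   2. DESCRIBE FUNCTION [EXTENDED] descFuncNames                 -> ^(TOK_DESCFUNCTION ...)
    //   3. DESCRIBE (DATABASE | SCHEMA) [EXTENDED] identifier         -> ^(TOK_DESCDATABASE ...)
    // Illustrative inputs (names made up): DESCRIBE FORMATTED some_db.some_table,
    // DESCRIBE FUNCTION EXTENDED concat, DESCRIBE DATABASE EXTENDED some_db.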
    public final HiveParser.descStatement_return descStatement() throws RecognitionException {
        HiveParser.descStatement_return retval = new HiveParser.descStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token descOptions=null;
        Token KW_DESCRIBE370=null;
        Token KW_DESC371=null;
        Token KW_DESCRIBE372=null;
        Token KW_DESC373=null;
        Token KW_FUNCTION374=null;
        Token KW_EXTENDED375=null;
        Token KW_DESCRIBE376=null;
        Token KW_DESC377=null;
        Token KW_DATABASE378=null;
        Token KW_SCHEMA379=null;
        Token KW_EXTENDED380=null;
        HiveParser.descPartTypeExpr_return parttype =null;

        HiveParser_IdentifiersParser.descFuncNames_return name =null;

        HiveParser_IdentifiersParser.identifier_return dbName =null;


        CommonTree descOptions_tree=null;
        CommonTree KW_DESCRIBE370_tree=null;
        CommonTree KW_DESC371_tree=null;
        CommonTree KW_DESCRIBE372_tree=null;
        CommonTree KW_DESC373_tree=null;
        CommonTree KW_FUNCTION374_tree=null;
        CommonTree KW_EXTENDED375_tree=null;
        CommonTree KW_DESCRIBE376_tree=null;
        CommonTree KW_DESC377_tree=null;
        CommonTree KW_DATABASE378_tree=null;
        CommonTree KW_SCHEMA379_tree=null;
        CommonTree KW_EXTENDED380_tree=null;
        RewriteRuleTokenStream stream_KW_DESC=new RewriteRuleTokenStream(adaptor,"token KW_DESC");
        RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
        RewriteRuleTokenStream stream_KW_FUNCTION=new RewriteRuleTokenStream(adaptor,"token KW_FUNCTION");
        RewriteRuleTokenStream stream_KW_FORMATTED=new RewriteRuleTokenStream(adaptor,"token KW_FORMATTED");
        RewriteRuleTokenStream stream_KW_EXTENDED=new RewriteRuleTokenStream(adaptor,"token KW_EXTENDED");
        RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
        RewriteRuleTokenStream stream_KW_PRETTY=new RewriteRuleTokenStream(adaptor,"token KW_PRETTY");
        RewriteRuleTokenStream stream_KW_DESCRIBE=new RewriteRuleTokenStream(adaptor,"token KW_DESCRIBE");
        RewriteRuleSubtreeStream stream_descPartTypeExpr=new RewriteRuleSubtreeStream(adaptor,"rule descPartTypeExpr");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
        RewriteRuleSubtreeStream stream_descFuncNames=new RewriteRuleSubtreeStream(adaptor,"rule descFuncNames");
         pushMsg("describe statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1316:5: ( ( KW_DESCRIBE | KW_DESC ) (descOptions= KW_FORMATTED |descOptions= KW_EXTENDED |descOptions= KW_PRETTY )? (parttype= descPartTypeExpr ) -> ^( TOK_DESCTABLE $parttype ( $descOptions)? ) | ( KW_DESCRIBE | KW_DESC ) KW_FUNCTION ( KW_EXTENDED )? (name= descFuncNames ) -> ^( TOK_DESCFUNCTION $name ( KW_EXTENDED )? ) | ( KW_DESCRIBE | KW_DESC ) ( KW_DATABASE | KW_SCHEMA ) ( KW_EXTENDED )? (dbName= identifier ) -> ^( TOK_DESCDATABASE $dbName ( KW_EXTENDED )? ) )
            int alt107=3;
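            // The three alternatives are disambiguated purely by lookahead: after DESCRIBE/DESC,
            // KW_FUNCTION selects alternative 2 and KW_DATABASE alternative 3, while KW_SCHEMA is
            // ambiguous (it is also usable as an identifier), so one more token is inspected: a
            // following DOT, Identifier, or KW_PARTITION keeps the table-description form, and
            // most other follow-ups route to the DATABASE/SCHEMA form.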
            switch ( input.LA(1) ) {
            case KW_DESCRIBE:
                {
                switch ( input.LA(2) ) {
                case Identifier:
                case KW_ADD:
                case KW_ADMIN:
                case KW_AFTER:
                case KW_ALL:
                case KW_ALTER:
                case KW_ANALYZE:
                case KW_ARCHIVE:
                case KW_ARRAY:
                case KW_AS:
                case KW_ASC:
                case KW_AUTHORIZATION:
                case KW_BEFORE:
                case KW_BETWEEN:
                case KW_BIGINT:
                case KW_BINARY:
                case KW_BOOLEAN:
                case KW_BOTH:
                case KW_BUCKET:
                case KW_BUCKETS:
                case KW_BY:
                case KW_CASCADE:
                case KW_CHANGE:
                case KW_CLUSTER:
                case KW_CLUSTERED:
                case KW_CLUSTERSTATUS:
                case KW_COLLECTION:
                case KW_COLUMNS:
                case KW_COMMENT:
                case KW_COMPACT:
                case KW_COMPACTIONS:
                case KW_COMPUTE:
                case KW_CONCATENATE:
                case KW_CONTINUE:
                case KW_CREATE:
                case KW_CUBE:
                case KW_CURSOR:
                case KW_DATA:
                case KW_DATABASES:
                case KW_DATE:
                case KW_DATETIME:
                case KW_DBPROPERTIES:
                case KW_DECIMAL:
                case KW_DEFAULT:
                case KW_DEFERRED:
                case KW_DEFINED:
                case KW_DELETE:
                case KW_DELIMITED:
                case KW_DEPENDENCY:
                case KW_DESC:
                case KW_DESCRIBE:
                case KW_DIRECTORIES:
                case KW_DIRECTORY:
                case KW_DISABLE:
                case KW_DISTRIBUTE:
                case KW_DOUBLE:
                case KW_DROP:
                case KW_ELEM_TYPE:
                case KW_ENABLE:
                case KW_ESCAPED:
                case KW_EXCLUSIVE:
                case KW_EXISTS:
                case KW_EXPLAIN:
                case KW_EXPORT:
                case KW_EXTENDED:
                case KW_EXTERNAL:
                case KW_FALSE:
                case KW_FETCH:
                case KW_FIELDS:
                case KW_FILE:
                case KW_FILEFORMAT:
                case KW_FIRST:
                case KW_FLOAT:
                case KW_FOR:
                case KW_FORMAT:
                case KW_FORMATTED:
                case KW_FULL:
                case KW_FUNCTIONS:
                case KW_GRANT:
                case KW_GROUP:
                case KW_GROUPING:
                case KW_HOLD_DDLTIME:
                case KW_IDXPROPERTIES:
                case KW_IGNORE:
                case KW_IMPORT:
                case KW_IN:
                case KW_INDEX:
                case KW_INDEXES:
                case KW_INNER:
                case KW_INPATH:
                case KW_INPUTDRIVER:
                case KW_INPUTFORMAT:
                case KW_INSERT:
                case KW_INT:
                case KW_INTERSECT:
                case KW_INTO:
                case KW_IS:
                case KW_ITEMS:
                case KW_JAR:
                case KW_KEYS:
                case KW_KEY_TYPE:
                case KW_LATERAL:
                case KW_LEFT:
                case KW_LIKE:
                case KW_LIMIT:
                case KW_LINES:
                case KW_LOAD:
                case KW_LOCAL:
                case KW_LOCATION:
                case KW_LOCK:
                case KW_LOCKS:
                case KW_LOGICAL:
                case KW_LONG:
                case KW_MAPJOIN:
                case KW_MATERIALIZED:
                case KW_MINUS:
                case KW_MSCK:
                case KW_NONE:
                case KW_NOSCAN:
                case KW_NO_DROP:
                case KW_NULL:
                case KW_OF:
                case KW_OFFLINE:
                case KW_OPTION:
                case KW_ORDER:
                case KW_OUT:
                case KW_OUTER:
                case KW_OUTPUTDRIVER:
                case KW_OUTPUTFORMAT:
                case KW_OVERWRITE:
                case KW_OWNER:
                case KW_PARTITION:
                case KW_PARTITIONED:
                case KW_PARTITIONS:
                case KW_PERCENT:
                case KW_PLUS:
                case KW_PRETTY:
                case KW_PRINCIPALS:
                case KW_PROCEDURE:
                case KW_PROTECTION:
                case KW_PURGE:
                case KW_RANGE:
                case KW_READ:
                case KW_READONLY:
                case KW_READS:
                case KW_REBUILD:
                case KW_RECORDREADER:
                case KW_RECORDWRITER:
                case KW_REGEXP:
                case KW_RENAME:
                case KW_REPAIR:
                case KW_REPLACE:
                case KW_RESTRICT:
                case KW_REVOKE:
                case KW_REWRITE:
                case KW_RIGHT:
                case KW_RLIKE:
                case KW_ROLE:
                case KW_ROLES:
                case KW_ROLLUP:
                case KW_ROW:
                case KW_ROWS:
                case KW_SCHEMAS:
                case KW_SEMI:
                case KW_SERDE:
                case KW_SERDEPROPERTIES:
                case KW_SET:
                case KW_SETS:
                case KW_SHARED:
                case KW_SHOW:
                case KW_SHOW_DATABASE:
                case KW_SKEWED:
                case KW_SMALLINT:
                case KW_SORT:
                case KW_SORTED:
                case KW_SSL:
                case KW_STATISTICS:
                case KW_STORED:
                case KW_STREAMTABLE:
                case KW_STRING:
                case KW_STRUCT:
                case KW_TABLE:
                case KW_TABLES:
                case KW_TBLPROPERTIES:
                case KW_TEMPORARY:
                case KW_TERMINATED:
                case KW_TIMESTAMP:
                case KW_TINYINT:
                case KW_TO:
                case KW_TOUCH:
                case KW_TRANSACTIONS:
                case KW_TRIGGER:
                case KW_TRUE:
                case KW_TRUNCATE:
                case KW_UNARCHIVE:
                case KW_UNDO:
                case KW_UNION:
                case KW_UNIONTYPE:
                case KW_UNLOCK:
                case KW_UNSET:
                case KW_UNSIGNED:
                case KW_UPDATE:
                case KW_USE:
                case KW_USER:
                case KW_USING:
                case KW_UTC:
                case KW_UTCTIMESTAMP:
                case KW_VALUES:
                case KW_VALUE_TYPE:
                case KW_VIEW:
                case KW_WHILE:
                case KW_WITH:
                    {
                    alt107=1;
                    }
                    break;
                case KW_SCHEMA:
                    {
                    switch ( input.LA(3) ) {
                    case DOT:
                        {
                        alt107=1;
                        }
                        break;
                    case Identifier:
                        {
                        alt107=1;
                        }
                        break;
                    case KW_PARTITION:
                        {
                        alt107=1;
                        }
                        break;
                    case EOF:
                    case KW_ADD:
                    case KW_ADMIN:
                    case KW_AFTER:
                    case KW_ALL:
                    case KW_ALTER:
                    case KW_ANALYZE:
                    case KW_ARCHIVE:
                    case KW_ARRAY:
                    case KW_AS:
                    case KW_ASC:
                    case KW_AUTHORIZATION:
                    case KW_BEFORE:
                    case KW_BETWEEN:
                    case KW_BIGINT:
                    case KW_BINARY:
                    case KW_BOOLEAN:
                    case KW_BOTH:
                    case KW_BUCKET:
                    case KW_BUCKETS:
                    case KW_BY:
                    case KW_CASCADE:
                    case KW_CHANGE:
                    case KW_CLUSTER:
                    case KW_CLUSTERED:
                    case KW_CLUSTERSTATUS:
                    case KW_COLLECTION:
                    case KW_COLUMNS:
                    case KW_COMMENT:
                    case KW_COMPACT:
                    case KW_COMPACTIONS:
                    case KW_COMPUTE:
                    case KW_CONCATENATE:
                    case KW_CONTINUE:
                    case KW_CREATE:
                    case KW_CUBE:
                    case KW_CURSOR:
                    case KW_DATA:
                    case KW_DATABASES:
                    case KW_DATE:
                    case KW_DATETIME:
                    case KW_DBPROPERTIES:
                    case KW_DECIMAL:
                    case KW_DEFAULT:
                    case KW_DEFERRED:
                    case KW_DEFINED:
                    case KW_DELETE:
                    case KW_DELIMITED:
                    case KW_DEPENDENCY:
                    case KW_DESC:
                    case KW_DESCRIBE:
                    case KW_DIRECTORIES:
                    case KW_DIRECTORY:
                    case KW_DISABLE:
                    case KW_DISTRIBUTE:
                    case KW_DOUBLE:
                    case KW_DROP:
                    case KW_ELEM_TYPE:
                    case KW_ENABLE:
                    case KW_ESCAPED:
                    case KW_EXCLUSIVE:
                    case KW_EXISTS:
                    case KW_EXPLAIN:
                    case KW_EXPORT:
                    case KW_EXTERNAL:
                    case KW_FALSE:
                    case KW_FETCH:
                    case KW_FIELDS:
                    case KW_FILE:
                    case KW_FILEFORMAT:
                    case KW_FIRST:
                    case KW_FLOAT:
                    case KW_FOR:
                    case KW_FORMAT:
                    case KW_FORMATTED:
                    case KW_FULL:
                    case KW_FUNCTIONS:
                    case KW_GRANT:
                    case KW_GROUP:
                    case KW_GROUPING:
                    case KW_HOLD_DDLTIME:
                    case KW_IDXPROPERTIES:
                    case KW_IGNORE:
                    case KW_IMPORT:
                    case KW_IN:
                    case KW_INDEX:
                    case KW_INDEXES:
                    case KW_INNER:
                    case KW_INPATH:
                    case KW_INPUTDRIVER:
                    case KW_INPUTFORMAT:
                    case KW_INSERT:
                    case KW_INT:
                    case KW_INTERSECT:
                    case KW_INTO:
                    case KW_IS:
                    case KW_ITEMS:
                    case KW_JAR:
                    case KW_KEYS:
                    case KW_KEY_TYPE:
                    case KW_LATERAL:
                    case KW_LEFT:
                    case KW_LIKE:
                    case KW_LIMIT:
                    case KW_LINES:
                    case KW_LOAD:
                    case KW_LOCAL:
                    case KW_LOCATION:
                    case KW_LOCK:
                    case KW_LOCKS:
                    case KW_LOGICAL:
                    case KW_LONG:
                    case KW_MAPJOIN:
                    case KW_MATERIALIZED:
                    case KW_MINUS:
                    case KW_MSCK:
                    case KW_NONE:
                    case KW_NOSCAN:
                    case KW_NO_DROP:
                    case KW_NULL:
                    case KW_OF:
                    case KW_OFFLINE:
                    case KW_OPTION:
                    case KW_ORDER:
                    case KW_OUT:
                    case KW_OUTER:
                    case KW_OUTPUTDRIVER:
                    case KW_OUTPUTFORMAT:
                    case KW_OVERWRITE:
                    case KW_OWNER:
                    case KW_PARTITIONED:
                    case KW_PARTITIONS:
                    case KW_PERCENT:
                    case KW_PLUS:
                    case KW_PRETTY:
                    case KW_PRINCIPALS:
                    case KW_PROCEDURE:
                    case KW_PROTECTION:
                    case KW_PURGE:
                    case KW_RANGE:
                    case KW_READ:
                    case KW_READONLY:
                    case KW_READS:
                    case KW_REBUILD:
                    case KW_RECORDREADER:
                    case KW_RECORDWRITER:
                    case KW_REGEXP:
                    case KW_RENAME:
                    case KW_REPAIR:
                    case KW_REPLACE:
                    case KW_RESTRICT:
                    case KW_REVOKE:
                    case KW_REWRITE:
                    case KW_RIGHT:
                    case KW_RLIKE:
                    case KW_ROLE:
                    case KW_ROLES:
                    case KW_ROLLUP:
                    case KW_ROW:
                    case KW_ROWS:
                    case KW_SCHEMA:
                    case KW_SCHEMAS:
                    case KW_SEMI:
                    case KW_SERDE:
                    case KW_SERDEPROPERTIES:
                    case KW_SET:
                    case KW_SETS:
                    case KW_SHARED:
                    case KW_SHOW:
                    case KW_SHOW_DATABASE:
                    case KW_SKEWED:
                    case KW_SMALLINT:
                    case KW_SORT:
                    case KW_SORTED:
                    case KW_SSL:
                    case KW_STATISTICS:
                    case KW_STORED:
                    case KW_STREAMTABLE:
                    case KW_STRING:
                    case KW_STRUCT:
                    case KW_TABLE:
                    case KW_TABLES:
                    case KW_TBLPROPERTIES:
                    case KW_TEMPORARY:
                    case KW_TERMINATED:
                    case KW_TIMESTAMP:
                    case KW_TINYINT:
                    case KW_TO:
                    case KW_TOUCH:
                    case KW_TRANSACTIONS:
                    case KW_TRIGGER:
                    case KW_TRUE:
                    case KW_TRUNCATE:
                    case KW_UNARCHIVE:
                    case KW_UNDO:
                    case KW_UNION:
                    case KW_UNIONTYPE:
                    case KW_UNLOCK:
                    case KW_UNSET:
                    case KW_UNSIGNED:
                    case KW_UPDATE:
                    case KW_USE:
                    case KW_USER:
                    case KW_USING:
                    case KW_UTC:
                    case KW_UTCTIMESTAMP:
                    case KW_VALUES:
                    case KW_VALUE_TYPE:
                    case KW_VIEW:
                    case KW_WHILE:
                    case KW_WITH:
                        {
                        alt107=1;
                        }
                        break;
                    case KW_EXTENDED:
                        {
                        alt107=3;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 107, 7, input);

                        throw nvae;

                    }

                    }
                    break;
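                // KW_FUNCTION at LA(2) commits to the DESCRIBE FUNCTION alternative and
                // KW_DATABASE to the DESCRIBE DATABASE alternative; any other second token
                // falls through to the NoViableAltException below.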
                case KW_FUNCTION:
                    {
                    alt107=2;
                    }
                    break;
                case KW_DATABASE:
                    {
                    alt107=3;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 107, 1, input);

                    throw nvae;

                }

                }
                break;
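            // The KW_DESC branch below performs the same lookahead analysis for DESC,
            // which is interchangeable with DESCRIBE in this rule.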
            case KW_DESC:
                {
                switch ( input.LA(2) ) {
                case Identifier:
                case KW_ADD:
                case KW_ADMIN:
                case KW_AFTER:
                case KW_ALL:
                case KW_ALTER:
                case KW_ANALYZE:
                case KW_ARCHIVE:
                case KW_ARRAY:
                case KW_AS:
                case KW_ASC:
                case KW_AUTHORIZATION:
                case KW_BEFORE:
                case KW_BETWEEN:
                case KW_BIGINT:
                case KW_BINARY:
                case KW_BOOLEAN:
                case KW_BOTH:
                case KW_BUCKET:
                case KW_BUCKETS:
                case KW_BY:
                case KW_CASCADE:
                case KW_CHANGE:
                case KW_CLUSTER:
                case KW_CLUSTERED:
                case KW_CLUSTERSTATUS:
                case KW_COLLECTION:
                case KW_COLUMNS:
                case KW_COMMENT:
                case KW_COMPACT:
                case KW_COMPACTIONS:
                case KW_COMPUTE:
                case KW_CONCATENATE:
                case KW_CONTINUE:
                case KW_CREATE:
                case KW_CUBE:
                case KW_CURSOR:
                case KW_DATA:
                case KW_DATABASES:
                case KW_DATE:
                case KW_DATETIME:
                case KW_DBPROPERTIES:
                case KW_DECIMAL:
                case KW_DEFAULT:
                case KW_DEFERRED:
                case KW_DEFINED:
                case KW_DELETE:
                case KW_DELIMITED:
                case KW_DEPENDENCY:
                case KW_DESC:
                case KW_DESCRIBE:
                case KW_DIRECTORIES:
                case KW_DIRECTORY:
                case KW_DISABLE:
                case KW_DISTRIBUTE:
                case KW_DOUBLE:
                case KW_DROP:
                case KW_ELEM_TYPE:
                case KW_ENABLE:
                case KW_ESCAPED:
                case KW_EXCLUSIVE:
                case KW_EXISTS:
                case KW_EXPLAIN:
                case KW_EXPORT:
                case KW_EXTENDED:
                case KW_EXTERNAL:
                case KW_FALSE:
                case KW_FETCH:
                case KW_FIELDS:
                case KW_FILE:
                case KW_FILEFORMAT:
                case KW_FIRST:
                case KW_FLOAT:
                case KW_FOR:
                case KW_FORMAT:
                case KW_FORMATTED:
                case KW_FULL:
                case KW_FUNCTIONS:
                case KW_GRANT:
                case KW_GROUP:
                case KW_GROUPING:
                case KW_HOLD_DDLTIME:
                case KW_IDXPROPERTIES:
                case KW_IGNORE:
                case KW_IMPORT:
                case KW_IN:
                case KW_INDEX:
                case KW_INDEXES:
                case KW_INNER:
                case KW_INPATH:
                case KW_INPUTDRIVER:
                case KW_INPUTFORMAT:
                case KW_INSERT:
                case KW_INT:
                case KW_INTERSECT:
                case KW_INTO:
                case KW_IS:
                case KW_ITEMS:
                case KW_JAR:
                case KW_KEYS:
                case KW_KEY_TYPE:
                case KW_LATERAL:
                case KW_LEFT:
                case KW_LIKE:
                case KW_LIMIT:
                case KW_LINES:
                case KW_LOAD:
                case KW_LOCAL:
                case KW_LOCATION:
                case KW_LOCK:
                case KW_LOCKS:
                case KW_LOGICAL:
                case KW_LONG:
                case KW_MAPJOIN:
                case KW_MATERIALIZED:
                case KW_MINUS:
                case KW_MSCK:
                case KW_NONE:
                case KW_NOSCAN:
                case KW_NO_DROP:
                case KW_NULL:
                case KW_OF:
                case KW_OFFLINE:
                case KW_OPTION:
                case KW_ORDER:
                case KW_OUT:
                case KW_OUTER:
                case KW_OUTPUTDRIVER:
                case KW_OUTPUTFORMAT:
                case KW_OVERWRITE:
                case KW_OWNER:
                case KW_PARTITION:
                case KW_PARTITIONED:
                case KW_PARTITIONS:
                case KW_PERCENT:
                case KW_PLUS:
                case KW_PRETTY:
                case KW_PRINCIPALS:
                case KW_PROCEDURE:
                case KW_PROTECTION:
                case KW_PURGE:
                case KW_RANGE:
                case KW_READ:
                case KW_READONLY:
                case KW_READS:
                case KW_REBUILD:
                case KW_RECORDREADER:
                case KW_RECORDWRITER:
                case KW_REGEXP:
                case KW_RENAME:
                case KW_REPAIR:
                case KW_REPLACE:
                case KW_RESTRICT:
                case KW_REVOKE:
                case KW_REWRITE:
                case KW_RIGHT:
                case KW_RLIKE:
                case KW_ROLE:
                case KW_ROLES:
                case KW_ROLLUP:
                case KW_ROW:
                case KW_ROWS:
                case KW_SCHEMAS:
                case KW_SEMI:
                case KW_SERDE:
                case KW_SERDEPROPERTIES:
                case KW_SET:
                case KW_SETS:
                case KW_SHARED:
                case KW_SHOW:
                case KW_SHOW_DATABASE:
                case KW_SKEWED:
                case KW_SMALLINT:
                case KW_SORT:
                case KW_SORTED:
                case KW_SSL:
                case KW_STATISTICS:
                case KW_STORED:
                case KW_STREAMTABLE:
                case KW_STRING:
                case KW_STRUCT:
                case KW_TABLE:
                case KW_TABLES:
                case KW_TBLPROPERTIES:
                case KW_TEMPORARY:
                case KW_TERMINATED:
                case KW_TIMESTAMP:
                case KW_TINYINT:
                case KW_TO:
                case KW_TOUCH:
                case KW_TRANSACTIONS:
                case KW_TRIGGER:
                case KW_TRUE:
                case KW_TRUNCATE:
                case KW_UNARCHIVE:
                case KW_UNDO:
                case KW_UNION:
                case KW_UNIONTYPE:
                case KW_UNLOCK:
                case KW_UNSET:
                case KW_UNSIGNED:
                case KW_UPDATE:
                case KW_USE:
                case KW_USER:
                case KW_USING:
                case KW_UTC:
                case KW_UTCTIMESTAMP:
                case KW_VALUES:
                case KW_VALUE_TYPE:
                case KW_VIEW:
                case KW_WHILE:
                case KW_WITH:
                    {
                    alt107=1;
                    }
                    break;
                case KW_SCHEMA:
                    {
                    switch ( input.LA(3) ) {
                    case DOT:
                        {
                        alt107=1;
                        }
                        break;
                    case Identifier:
                        {
                        alt107=1;
                        }
                        break;
                    case KW_PARTITION:
                        {
                        alt107=1;
                        }
                        break;
                    case EOF:
                    case KW_ADD:
                    case KW_ADMIN:
                    case KW_AFTER:
                    case KW_ALL:
                    case KW_ALTER:
                    case KW_ANALYZE:
                    case KW_ARCHIVE:
                    case KW_ARRAY:
                    case KW_AS:
                    case KW_ASC:
                    case KW_AUTHORIZATION:
                    case KW_BEFORE:
                    case KW_BETWEEN:
                    case KW_BIGINT:
                    case KW_BINARY:
                    case KW_BOOLEAN:
                    case KW_BOTH:
                    case KW_BUCKET:
                    case KW_BUCKETS:
                    case KW_BY:
                    case KW_CASCADE:
                    case KW_CHANGE:
                    case KW_CLUSTER:
                    case KW_CLUSTERED:
                    case KW_CLUSTERSTATUS:
                    case KW_COLLECTION:
                    case KW_COLUMNS:
                    case KW_COMMENT:
                    case KW_COMPACT:
                    case KW_COMPACTIONS:
                    case KW_COMPUTE:
                    case KW_CONCATENATE:
                    case KW_CONTINUE:
                    case KW_CREATE:
                    case KW_CUBE:
                    case KW_CURSOR:
                    case KW_DATA:
                    case KW_DATABASES:
                    case KW_DATE:
                    case KW_DATETIME:
                    case KW_DBPROPERTIES:
                    case KW_DECIMAL:
                    case KW_DEFAULT:
                    case KW_DEFERRED:
                    case KW_DEFINED:
                    case KW_DELETE:
                    case KW_DELIMITED:
                    case KW_DEPENDENCY:
                    case KW_DESC:
                    case KW_DESCRIBE:
                    case KW_DIRECTORIES:
                    case KW_DIRECTORY:
                    case KW_DISABLE:
                    case KW_DISTRIBUTE:
                    case KW_DOUBLE:
                    case KW_DROP:
                    case KW_ELEM_TYPE:
                    case KW_ENABLE:
                    case KW_ESCAPED:
                    case KW_EXCLUSIVE:
                    case KW_EXISTS:
                    case KW_EXPLAIN:
                    case KW_EXPORT:
                    case KW_EXTERNAL:
                    case KW_FALSE:
                    case KW_FETCH:
                    case KW_FIELDS:
                    case KW_FILE:
                    case KW_FILEFORMAT:
                    case KW_FIRST:
                    case KW_FLOAT:
                    case KW_FOR:
                    case KW_FORMAT:
                    case KW_FORMATTED:
                    case KW_FULL:
                    case KW_FUNCTIONS:
                    case KW_GRANT:
                    case KW_GROUP:
                    case KW_GROUPING:
                    case KW_HOLD_DDLTIME:
                    case KW_IDXPROPERTIES:
                    case KW_IGNORE:
                    case KW_IMPORT:
                    case KW_IN:
                    case KW_INDEX:
                    case KW_INDEXES:
                    case KW_INNER:
                    case KW_INPATH:
                    case KW_INPUTDRIVER:
                    case KW_INPUTFORMAT:
                    case KW_INSERT:
                    case KW_INT:
                    case KW_INTERSECT:
                    case KW_INTO:
                    case KW_IS:
                    case KW_ITEMS:
                    case KW_JAR:
                    case KW_KEYS:
                    case KW_KEY_TYPE:
                    case KW_LATERAL:
                    case KW_LEFT:
                    case KW_LIKE:
                    case KW_LIMIT:
                    case KW_LINES:
                    case KW_LOAD:
                    case KW_LOCAL:
                    case KW_LOCATION:
                    case KW_LOCK:
                    case KW_LOCKS:
                    case KW_LOGICAL:
                    case KW_LONG:
                    case KW_MAPJOIN:
                    case KW_MATERIALIZED:
                    case KW_MINUS:
                    case KW_MSCK:
                    case KW_NONE:
                    case KW_NOSCAN:
                    case KW_NO_DROP:
                    case KW_NULL:
                    case KW_OF:
                    case KW_OFFLINE:
                    case KW_OPTION:
                    case KW_ORDER:
                    case KW_OUT:
                    case KW_OUTER:
                    case KW_OUTPUTDRIVER:
                    case KW_OUTPUTFORMAT:
                    case KW_OVERWRITE:
                    case KW_OWNER:
                    case KW_PARTITIONED:
                    case KW_PARTITIONS:
                    case KW_PERCENT:
                    case KW_PLUS:
                    case KW_PRETTY:
                    case KW_PRINCIPALS:
                    case KW_PROCEDURE:
                    case KW_PROTECTION:
                    case KW_PURGE:
                    case KW_RANGE:
                    case KW_READ:
                    case KW_READONLY:
                    case KW_READS:
                    case KW_REBUILD:
                    case KW_RECORDREADER:
                    case KW_RECORDWRITER:
                    case KW_REGEXP:
                    case KW_RENAME:
                    case KW_REPAIR:
                    case KW_REPLACE:
                    case KW_RESTRICT:
                    case KW_REVOKE:
                    case KW_REWRITE:
                    case KW_RIGHT:
                    case KW_RLIKE:
                    case KW_ROLE:
                    case KW_ROLES:
                    case KW_ROLLUP:
                    case KW_ROW:
                    case KW_ROWS:
                    case KW_SCHEMA:
                    case KW_SCHEMAS:
                    case KW_SEMI:
                    case KW_SERDE:
                    case KW_SERDEPROPERTIES:
                    case KW_SET:
                    case KW_SETS:
                    case KW_SHARED:
                    case KW_SHOW:
                    case KW_SHOW_DATABASE:
                    case KW_SKEWED:
                    case KW_SMALLINT:
                    case KW_SORT:
                    case KW_SORTED:
                    case KW_SSL:
                    case KW_STATISTICS:
                    case KW_STORED:
                    case KW_STREAMTABLE:
                    case KW_STRING:
                    case KW_STRUCT:
                    case KW_TABLE:
                    case KW_TABLES:
                    case KW_TBLPROPERTIES:
                    case KW_TEMPORARY:
                    case KW_TERMINATED:
                    case KW_TIMESTAMP:
                    case KW_TINYINT:
                    case KW_TO:
                    case KW_TOUCH:
                    case KW_TRANSACTIONS:
                    case KW_TRIGGER:
                    case KW_TRUE:
                    case KW_TRUNCATE:
                    case KW_UNARCHIVE:
                    case KW_UNDO:
                    case KW_UNION:
                    case KW_UNIONTYPE:
                    case KW_UNLOCK:
                    case KW_UNSET:
                    case KW_UNSIGNED:
                    case KW_UPDATE:
                    case KW_USE:
                    case KW_USER:
                    case KW_USING:
                    case KW_UTC:
                    case KW_UTCTIMESTAMP:
                    case KW_VALUES:
                    case KW_VALUE_TYPE:
                    case KW_VIEW:
                    case KW_WHILE:
                    case KW_WITH:
                        {
                        alt107=1;
                        }
                        break;
                    case KW_EXTENDED:
                        {
                        alt107=3;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 107, 15, input);

                        throw nvae;

                    }

                    }
                    break;
                case KW_FUNCTION:
                    {
                    alt107=2;
                    }
                    break;
                case KW_DATABASE:
                    {
                    alt107=3;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 107, 2, input);

                    throw nvae;

                }

                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 107, 0, input);

                throw nvae;

            }

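            // alt107 selects among the three alternatives of descStatement:
            //   1: (KW_DESCRIBE | KW_DESC) (KW_FORMATTED | KW_EXTENDED | KW_PRETTY)? descPartTypeExpr
            //   2: (KW_DESCRIBE | KW_DESC) KW_FUNCTION (KW_EXTENDED)? descFuncNames
            //   3: (KW_DESCRIBE | KW_DESC) (KW_DATABASE | KW_SCHEMA) (KW_EXTENDED)? identifier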
            switch (alt107) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1316:7: ( KW_DESCRIBE | KW_DESC ) (descOptions= KW_FORMATTED |descOptions= KW_EXTENDED |descOptions= KW_PRETTY )? (parttype= descPartTypeExpr )
                    {
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1316:7: ( KW_DESCRIBE | KW_DESC )
                    int alt100=2;
                    switch ( input.LA(1) ) {
                    case KW_DESCRIBE:
                        {
                        alt100=1;
                        }
                        break;
                    case KW_DESC:
                        {
                        alt100=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 100, 0, input);

                        throw nvae;

                    }

                    switch (alt100) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1316:8: KW_DESCRIBE
                            {
                            KW_DESCRIBE370=(Token)match(input,KW_DESCRIBE,FOLLOW_KW_DESCRIBE_in_descStatement6334);  
                            stream_KW_DESCRIBE.add(KW_DESCRIBE370);


                            }
                            break;
                        case 2 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1316:20: KW_DESC
                            {
                            KW_DESC371=(Token)match(input,KW_DESC,FOLLOW_KW_DESC_in_descStatement6336);  
                            stream_KW_DESC.add(KW_DESC371);


                            }
                            break;

                    }


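                    // KW_EXTENDED is always taken as a describe option here, but KW_FORMATTED and
                    // KW_PRETTY are only taken as options when LA(2) shows that a table reference
                    // still follows (an Identifier, KW_PARTITION, or another identifier-like
                    // keyword); otherwise alt101 keeps its default of 4 (no option) and the word is
                    // left for descPartTypeExpr, presumably so a table literally named "formatted"
                    // or "pretty" can still be described.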
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1316:29: (descOptions= KW_FORMATTED |descOptions= KW_EXTENDED |descOptions= KW_PRETTY )?
                    int alt101=4;
                    switch ( input.LA(1) ) {
                        case KW_FORMATTED:
                            {
                            switch ( input.LA(2) ) {
                                case Identifier:
                                    {
                                    alt101=1;
                                    }
                                    break;
                                case KW_PARTITION:
                                    {
                                    alt101=1;
                                    }
                                    break;
                                case KW_ADD:
                                case KW_ADMIN:
                                case KW_AFTER:
                                case KW_ALL:
                                case KW_ALTER:
                                case KW_ANALYZE:
                                case KW_ARCHIVE:
                                case KW_ARRAY:
                                case KW_AS:
                                case KW_ASC:
                                case KW_AUTHORIZATION:
                                case KW_BEFORE:
                                case KW_BETWEEN:
                                case KW_BIGINT:
                                case KW_BINARY:
                                case KW_BOOLEAN:
                                case KW_BOTH:
                                case KW_BUCKET:
                                case KW_BUCKETS:
                                case KW_BY:
                                case KW_CASCADE:
                                case KW_CHANGE:
                                case KW_CLUSTER:
                                case KW_CLUSTERED:
                                case KW_CLUSTERSTATUS:
                                case KW_COLLECTION:
                                case KW_COLUMNS:
                                case KW_COMMENT:
                                case KW_COMPACT:
                                case KW_COMPACTIONS:
                                case KW_COMPUTE:
                                case KW_CONCATENATE:
                                case KW_CONTINUE:
                                case KW_CREATE:
                                case KW_CUBE:
                                case KW_CURSOR:
                                case KW_DATA:
                                case KW_DATABASES:
                                case KW_DATE:
                                case KW_DATETIME:
                                case KW_DBPROPERTIES:
                                case KW_DECIMAL:
                                case KW_DEFAULT:
                                case KW_DEFERRED:
                                case KW_DEFINED:
                                case KW_DELETE:
                                case KW_DELIMITED:
                                case KW_DEPENDENCY:
                                case KW_DESC:
                                case KW_DESCRIBE:
                                case KW_DIRECTORIES:
                                case KW_DIRECTORY:
                                case KW_DISABLE:
                                case KW_DISTRIBUTE:
                                case KW_DOUBLE:
                                case KW_DROP:
                                case KW_ELEM_TYPE:
                                case KW_ENABLE:
                                case KW_ESCAPED:
                                case KW_EXCLUSIVE:
                                case KW_EXISTS:
                                case KW_EXPLAIN:
                                case KW_EXPORT:
                                case KW_EXTERNAL:
                                case KW_FALSE:
                                case KW_FETCH:
                                case KW_FIELDS:
                                case KW_FILE:
                                case KW_FILEFORMAT:
                                case KW_FIRST:
                                case KW_FLOAT:
                                case KW_FOR:
                                case KW_FORMAT:
                                case KW_FORMATTED:
                                case KW_FULL:
                                case KW_FUNCTIONS:
                                case KW_GRANT:
                                case KW_GROUP:
                                case KW_GROUPING:
                                case KW_HOLD_DDLTIME:
                                case KW_IDXPROPERTIES:
                                case KW_IGNORE:
                                case KW_IMPORT:
                                case KW_IN:
                                case KW_INDEX:
                                case KW_INDEXES:
                                case KW_INNER:
                                case KW_INPATH:
                                case KW_INPUTDRIVER:
                                case KW_INPUTFORMAT:
                                case KW_INSERT:
                                case KW_INT:
                                case KW_INTERSECT:
                                case KW_INTO:
                                case KW_IS:
                                case KW_ITEMS:
                                case KW_JAR:
                                case KW_KEYS:
                                case KW_KEY_TYPE:
                                case KW_LATERAL:
                                case KW_LEFT:
                                case KW_LIKE:
                                case KW_LIMIT:
                                case KW_LINES:
                                case KW_LOAD:
                                case KW_LOCAL:
                                case KW_LOCATION:
                                case KW_LOCK:
                                case KW_LOCKS:
                                case KW_LOGICAL:
                                case KW_LONG:
                                case KW_MAPJOIN:
                                case KW_MATERIALIZED:
                                case KW_MINUS:
                                case KW_MSCK:
                                case KW_NONE:
                                case KW_NOSCAN:
                                case KW_NO_DROP:
                                case KW_NULL:
                                case KW_OF:
                                case KW_OFFLINE:
                                case KW_OPTION:
                                case KW_ORDER:
                                case KW_OUT:
                                case KW_OUTER:
                                case KW_OUTPUTDRIVER:
                                case KW_OUTPUTFORMAT:
                                case KW_OVERWRITE:
                                case KW_OWNER:
                                case KW_PARTITIONED:
                                case KW_PARTITIONS:
                                case KW_PERCENT:
                                case KW_PLUS:
                                case KW_PRETTY:
                                case KW_PRINCIPALS:
                                case KW_PROCEDURE:
                                case KW_PROTECTION:
                                case KW_PURGE:
                                case KW_RANGE:
                                case KW_READ:
                                case KW_READONLY:
                                case KW_READS:
                                case KW_REBUILD:
                                case KW_RECORDREADER:
                                case KW_RECORDWRITER:
                                case KW_REGEXP:
                                case KW_RENAME:
                                case KW_REPAIR:
                                case KW_REPLACE:
                                case KW_RESTRICT:
                                case KW_REVOKE:
                                case KW_REWRITE:
                                case KW_RIGHT:
                                case KW_RLIKE:
                                case KW_ROLE:
                                case KW_ROLES:
                                case KW_ROLLUP:
                                case KW_ROW:
                                case KW_ROWS:
                                case KW_SCHEMA:
                                case KW_SCHEMAS:
                                case KW_SEMI:
                                case KW_SERDE:
                                case KW_SERDEPROPERTIES:
                                case KW_SET:
                                case KW_SETS:
                                case KW_SHARED:
                                case KW_SHOW:
                                case KW_SHOW_DATABASE:
                                case KW_SKEWED:
                                case KW_SMALLINT:
                                case KW_SORT:
                                case KW_SORTED:
                                case KW_SSL:
                                case KW_STATISTICS:
                                case KW_STORED:
                                case KW_STREAMTABLE:
                                case KW_STRING:
                                case KW_STRUCT:
                                case KW_TABLE:
                                case KW_TABLES:
                                case KW_TBLPROPERTIES:
                                case KW_TEMPORARY:
                                case KW_TERMINATED:
                                case KW_TIMESTAMP:
                                case KW_TINYINT:
                                case KW_TO:
                                case KW_TOUCH:
                                case KW_TRANSACTIONS:
                                case KW_TRIGGER:
                                case KW_TRUE:
                                case KW_TRUNCATE:
                                case KW_UNARCHIVE:
                                case KW_UNDO:
                                case KW_UNION:
                                case KW_UNIONTYPE:
                                case KW_UNLOCK:
                                case KW_UNSET:
                                case KW_UNSIGNED:
                                case KW_UPDATE:
                                case KW_USE:
                                case KW_USER:
                                case KW_USING:
                                case KW_UTC:
                                case KW_UTCTIMESTAMP:
                                case KW_VALUES:
                                case KW_VALUE_TYPE:
                                case KW_VIEW:
                                case KW_WHILE:
                                case KW_WITH:
                                    {
                                    alt101=1;
                                    }
                                    break;
                            }

                            }
                            break;
                        case KW_EXTENDED:
                            {
                            alt101=2;
                            }
                            break;
                        case KW_PRETTY:
                            {
                            switch ( input.LA(2) ) {
                                case Identifier:
                                    {
                                    alt101=3;
                                    }
                                    break;
                                case KW_PARTITION:
                                    {
                                    alt101=3;
                                    }
                                    break;
                                case KW_ADD:
                                case KW_ADMIN:
                                case KW_AFTER:
                                case KW_ALL:
                                case KW_ALTER:
                                case KW_ANALYZE:
                                case KW_ARCHIVE:
                                case KW_ARRAY:
                                case KW_AS:
                                case KW_ASC:
                                case KW_AUTHORIZATION:
                                case KW_BEFORE:
                                case KW_BETWEEN:
                                case KW_BIGINT:
                                case KW_BINARY:
                                case KW_BOOLEAN:
                                case KW_BOTH:
                                case KW_BUCKET:
                                case KW_BUCKETS:
                                case KW_BY:
                                case KW_CASCADE:
                                case KW_CHANGE:
                                case KW_CLUSTER:
                                case KW_CLUSTERED:
                                case KW_CLUSTERSTATUS:
                                case KW_COLLECTION:
                                case KW_COLUMNS:
                                case KW_COMMENT:
                                case KW_COMPACT:
                                case KW_COMPACTIONS:
                                case KW_COMPUTE:
                                case KW_CONCATENATE:
                                case KW_CONTINUE:
                                case KW_CREATE:
                                case KW_CUBE:
                                case KW_CURSOR:
                                case KW_DATA:
                                case KW_DATABASES:
                                case KW_DATE:
                                case KW_DATETIME:
                                case KW_DBPROPERTIES:
                                case KW_DECIMAL:
                                case KW_DEFAULT:
                                case KW_DEFERRED:
                                case KW_DEFINED:
                                case KW_DELETE:
                                case KW_DELIMITED:
                                case KW_DEPENDENCY:
                                case KW_DESC:
                                case KW_DESCRIBE:
                                case KW_DIRECTORIES:
                                case KW_DIRECTORY:
                                case KW_DISABLE:
                                case KW_DISTRIBUTE:
                                case KW_DOUBLE:
                                case KW_DROP:
                                case KW_ELEM_TYPE:
                                case KW_ENABLE:
                                case KW_ESCAPED:
                                case KW_EXCLUSIVE:
                                case KW_EXISTS:
                                case KW_EXPLAIN:
                                case KW_EXPORT:
                                case KW_EXTERNAL:
                                case KW_FALSE:
                                case KW_FETCH:
                                case KW_FIELDS:
                                case KW_FILE:
                                case KW_FILEFORMAT:
                                case KW_FIRST:
                                case KW_FLOAT:
                                case KW_FOR:
                                case KW_FORMAT:
                                case KW_FORMATTED:
                                case KW_FULL:
                                case KW_FUNCTIONS:
                                case KW_GRANT:
                                case KW_GROUP:
                                case KW_GROUPING:
                                case KW_HOLD_DDLTIME:
                                case KW_IDXPROPERTIES:
                                case KW_IGNORE:
                                case KW_IMPORT:
                                case KW_IN:
                                case KW_INDEX:
                                case KW_INDEXES:
                                case KW_INNER:
                                case KW_INPATH:
                                case KW_INPUTDRIVER:
                                case KW_INPUTFORMAT:
                                case KW_INSERT:
                                case KW_INT:
                                case KW_INTERSECT:
                                case KW_INTO:
                                case KW_IS:
                                case KW_ITEMS:
                                case KW_JAR:
                                case KW_KEYS:
                                case KW_KEY_TYPE:
                                case KW_LATERAL:
                                case KW_LEFT:
                                case KW_LIKE:
                                case KW_LIMIT:
                                case KW_LINES:
                                case KW_LOAD:
                                case KW_LOCAL:
                                case KW_LOCATION:
                                case KW_LOCK:
                                case KW_LOCKS:
                                case KW_LOGICAL:
                                case KW_LONG:
                                case KW_MAPJOIN:
                                case KW_MATERIALIZED:
                                case KW_MINUS:
                                case KW_MSCK:
                                case KW_NONE:
                                case KW_NOSCAN:
                                case KW_NO_DROP:
                                case KW_NULL:
                                case KW_OF:
                                case KW_OFFLINE:
                                case KW_OPTION:
                                case KW_ORDER:
                                case KW_OUT:
                                case KW_OUTER:
                                case KW_OUTPUTDRIVER:
                                case KW_OUTPUTFORMAT:
                                case KW_OVERWRITE:
                                case KW_OWNER:
                                case KW_PARTITIONED:
                                case KW_PARTITIONS:
                                case KW_PERCENT:
                                case KW_PLUS:
                                case KW_PRETTY:
                                case KW_PRINCIPALS:
                                case KW_PROCEDURE:
                                case KW_PROTECTION:
                                case KW_PURGE:
                                case KW_RANGE:
                                case KW_READ:
                                case KW_READONLY:
                                case KW_READS:
                                case KW_REBUILD:
                                case KW_RECORDREADER:
                                case KW_RECORDWRITER:
                                case KW_REGEXP:
                                case KW_RENAME:
                                case KW_REPAIR:
                                case KW_REPLACE:
                                case KW_RESTRICT:
                                case KW_REVOKE:
                                case KW_REWRITE:
                                case KW_RIGHT:
                                case KW_RLIKE:
                                case KW_ROLE:
                                case KW_ROLES:
                                case KW_ROLLUP:
                                case KW_ROW:
                                case KW_ROWS:
                                case KW_SCHEMA:
                                case KW_SCHEMAS:
                                case KW_SEMI:
                                case KW_SERDE:
                                case KW_SERDEPROPERTIES:
                                case KW_SET:
                                case KW_SETS:
                                case KW_SHARED:
                                case KW_SHOW:
                                case KW_SHOW_DATABASE:
                                case KW_SKEWED:
                                case KW_SMALLINT:
                                case KW_SORT:
                                case KW_SORTED:
                                case KW_SSL:
                                case KW_STATISTICS:
                                case KW_STORED:
                                case KW_STREAMTABLE:
                                case KW_STRING:
                                case KW_STRUCT:
                                case KW_TABLE:
                                case KW_TABLES:
                                case KW_TBLPROPERTIES:
                                case KW_TEMPORARY:
                                case KW_TERMINATED:
                                case KW_TIMESTAMP:
                                case KW_TINYINT:
                                case KW_TO:
                                case KW_TOUCH:
                                case KW_TRANSACTIONS:
                                case KW_TRIGGER:
                                case KW_TRUE:
                                case KW_TRUNCATE:
                                case KW_UNARCHIVE:
                                case KW_UNDO:
                                case KW_UNION:
                                case KW_UNIONTYPE:
                                case KW_UNLOCK:
                                case KW_UNSET:
                                case KW_UNSIGNED:
                                case KW_UPDATE:
                                case KW_USE:
                                case KW_USER:
                                case KW_USING:
                                case KW_UTC:
                                case KW_UTCTIMESTAMP:
                                case KW_VALUES:
                                case KW_VALUE_TYPE:
                                case KW_VIEW:
                                case KW_WHILE:
                                case KW_WITH:
                                    {
                                    alt101=3;
                                    }
                                    break;
                            }

                            }
                            break;
                    }

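                    // Consume whichever option token was predicted; the descOptions label is what
                    // the tree rewrite below attaches under TOK_DESCTABLE.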
                    switch (alt101) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1316:30: descOptions= KW_FORMATTED
                            {
                            descOptions=(Token)match(input,KW_FORMATTED,FOLLOW_KW_FORMATTED_in_descStatement6342);  
                            stream_KW_FORMATTED.add(descOptions);


                            }
                            break;
                        case 2 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1316:55: descOptions= KW_EXTENDED
                            {
                            descOptions=(Token)match(input,KW_EXTENDED,FOLLOW_KW_EXTENDED_in_descStatement6346);  
                            stream_KW_EXTENDED.add(descOptions);


                            }
                            break;
                        case 3 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1316:79: descOptions= KW_PRETTY
                            {
                            descOptions=(Token)match(input,KW_PRETTY,FOLLOW_KW_PRETTY_in_descStatement6350);  
                            stream_KW_PRETTY.add(descOptions);


                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1316:103: (parttype= descPartTypeExpr )
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1316:104: parttype= descPartTypeExpr
                    {
                    pushFollow(FOLLOW_descPartTypeExpr_in_descStatement6357);
                    parttype=descPartTypeExpr();

                    state._fsp--;

                    stream_descPartTypeExpr.add(parttype.getTree());

                    }


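                    // Build ^(TOK_DESCTABLE $parttype ($descOptions)?). For example, a statement
                    // such as "DESCRIBE FORMATTED some_table" (table name purely illustrative)
                    // yields TOK_DESCTABLE with the table expression as the first child and the
                    // FORMATTED token as an optional second child.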
                    // AST REWRITE
                    // elements: descOptions, parttype
                    // token labels: descOptions
                    // rule labels: retval, parttype
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleTokenStream stream_descOptions=new RewriteRuleTokenStream(adaptor,"token descOptions",descOptions);
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    RewriteRuleSubtreeStream stream_parttype=new RewriteRuleSubtreeStream(adaptor,"rule parttype",parttype!=null?parttype.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1316:131: -> ^( TOK_DESCTABLE $parttype ( $descOptions)? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1316:134: ^( TOK_DESCTABLE $parttype ( $descOptions)? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot((CommonTree)adaptor.create(TOK_DESCTABLE, "TOK_DESCTABLE"), root_1);

                        adaptor.addChild(root_1, stream_parttype.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1316:161: ( $descOptions)?
                        if ( stream_descOptions.hasNext() ) {
                            adaptor.addChild(root_1, stream_descOptions.nextNode());

                        }
                        stream_descOptions.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1317:7: ( KW_DESCRIBE | KW_DESC ) KW_FUNCTION ( KW_EXTENDED )? (name= descFuncNames )
                    {
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1317:7: ( KW_DESCRIBE | KW_DESC )
                    int alt102=2;
                    switch ( input.LA(1) ) {
                    case KW_DESCRIBE:
                        {
                        alt102=1;
                        }
                        break;
                    case KW_DESC:
                        {
                        alt102=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 102, 0, input);

                        throw nvae;

                    }

                    switch (alt102) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1317:8: KW_DESCRIBE
                            {
                            KW_DESCRIBE372=(Token)match(input,KW_DESCRIBE,FOLLOW_KW_DESCRIBE_in_descStatement6380);  
                            stream_KW_DESCRIBE.add(KW_DESCRIBE372);


                            }
                            break;
                        case 2 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1317:20: KW_DESC
                            {
                            KW_DESC373=(Token)match(input,KW_DESC,FOLLOW_KW_DESC_in_descStatement6382);  
                            stream_KW_DESC.add(KW_DESC373);


                            }
                            break;

                    }


                    KW_FUNCTION374=(Token)match(input,KW_FUNCTION,FOLLOW_KW_FUNCTION_in_descStatement6385);  
                    stream_KW_FUNCTION.add(KW_FUNCTION374);


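                    // KW_EXTENDED is optional after DESCRIBE FUNCTION; alt103 records whether it
                    // was present so the rewrite can carry it into the tree.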
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1317:41: ( KW_EXTENDED )?
                    int alt103=2;
                    switch ( input.LA(1) ) {
                        case KW_EXTENDED:
                            {
                            alt103=1;
                            }
                            break;
                    }

                    switch (alt103) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1317:41: KW_EXTENDED
                            {
                            KW_EXTENDED375=(Token)match(input,KW_EXTENDED,FOLLOW_KW_EXTENDED_in_descStatement6387);  
                            stream_KW_EXTENDED.add(KW_EXTENDED375);


                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1317:54: (name= descFuncNames )
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1317:55: name= descFuncNames
                    {
                    pushFollow(FOLLOW_descFuncNames_in_descStatement6393);
                    name=descFuncNames();

                    state._fsp--;

                    stream_descFuncNames.add(name.getTree());

                    }


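                    // Build ^(TOK_DESCFUNCTION $name (KW_EXTENDED)?). For example,
                    // "DESCRIBE FUNCTION EXTENDED concat" (function name purely illustrative)
                    // keeps the EXTENDED token as a child of TOK_DESCFUNCTION.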
                    // AST REWRITE
                    // elements: name, KW_EXTENDED
                    // token labels: 
                    // rule labels: retval, name
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    RewriteRuleSubtreeStream stream_name=new RewriteRuleSubtreeStream(adaptor,"rule name",name!=null?name.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1317:75: -> ^( TOK_DESCFUNCTION $name ( KW_EXTENDED )? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1317:78: ^( TOK_DESCFUNCTION $name ( KW_EXTENDED )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot((CommonTree)adaptor.create(TOK_DESCFUNCTION, "TOK_DESCFUNCTION"), root_1);

                        adaptor.addChild(root_1, stream_name.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1317:103: ( KW_EXTENDED )?
                        if ( stream_KW_EXTENDED.hasNext() ) {
                            adaptor.addChild(root_1, stream_KW_EXTENDED.nextNode());

                        }
                        stream_KW_EXTENDED.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1318:7: ( KW_DESCRIBE | KW_DESC ) ( KW_DATABASE | KW_SCHEMA ) ( KW_EXTENDED )? (dbName= identifier )
                    {
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1318:7: ( KW_DESCRIBE | KW_DESC )
                    int alt104=2;
                    switch ( input.LA(1) ) {
                    case KW_DESCRIBE:
                        {
                        alt104=1;
                        }
                        break;
                    case KW_DESC:
                        {
                        alt104=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 104, 0, input);

                        throw nvae;

                    }

                    switch (alt104) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1318:8: KW_DESCRIBE
                            {
                            KW_DESCRIBE376=(Token)match(input,KW_DESCRIBE,FOLLOW_KW_DESCRIBE_in_descStatement6415);  
                            stream_KW_DESCRIBE.add(KW_DESCRIBE376);


                            }
                            break;
                        case 2 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1318:20: KW_DESC
                            {
                            KW_DESC377=(Token)match(input,KW_DESC,FOLLOW_KW_DESC_in_descStatement6417);  
                            stream_KW_DESC.add(KW_DESC377);


                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1318:29: ( KW_DATABASE | KW_SCHEMA )
                    int alt105=2;
                    switch ( input.LA(1) ) {
                    case KW_DATABASE:
                        {
                        alt105=1;
                        }
                        break;
                    case KW_SCHEMA:
                        {
                        alt105=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 105, 0, input);

                        throw nvae;

                    }

                    switch (alt105) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1318:30: KW_DATABASE
                            {
                            KW_DATABASE378=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_descStatement6421);  
                            stream_KW_DATABASE.add(KW_DATABASE378);


                            }
                            break;
                        case 2 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1318:42: KW_SCHEMA
                            {
                            KW_SCHEMA379=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_descStatement6423);  
                            stream_KW_SCHEMA.add(KW_SCHEMA379);


                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1318:53: ( KW_EXTENDED )?
                    int alt106=2;
                    switch ( input.LA(1) ) {
                        case KW_EXTENDED:
                            {
                            alt106=1;
                            }
                            break;
                    }

                    switch (alt106) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1318:53: KW_EXTENDED
                            {
                            KW_EXTENDED380=(Token)match(input,KW_EXTENDED,FOLLOW_KW_EXTENDED_in_descStatement6426);  
                            stream_KW_EXTENDED.add(KW_EXTENDED380);


                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1318:66: (dbName= identifier )
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1318:67: dbName= identifier
                    {
                    pushFollow(FOLLOW_identifier_in_descStatement6432);
                    dbName=identifier();

                    state._fsp--;

                    stream_identifier.add(dbName.getTree());

                    }


                    // AST REWRITE
                    // elements: dbName, KW_EXTENDED
                    // token labels: 
                    // rule labels: retval, dbName
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    RewriteRuleSubtreeStream stream_dbName=new RewriteRuleSubtreeStream(adaptor,"rule dbName",dbName!=null?dbName.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1318:86: -> ^( TOK_DESCDATABASE $dbName ( KW_EXTENDED )? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1318:89: ^( TOK_DESCDATABASE $dbName ( KW_EXTENDED )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_DESCDATABASE, "TOK_DESCDATABASE")
                        , root_1);

                        adaptor.addChild(root_1, stream_dbName.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1318:116: ( KW_EXTENDED )?
                        if ( stream_KW_EXTENDED.hasNext() ) {
                            adaptor.addChild(root_1, 
                            stream_KW_EXTENDED.nextNode()
                            );

                        }
                        stream_KW_EXTENDED.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
            // cleanup to perform before leaving the rule (none needed here)
        }
        return retval;
    }
    // $ANTLR end "descStatement"
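
    // Examples of statements accepted by the descStatement rule above (a sketch inferred from
    // the rewrite targets TOK_DESCFUNCTION and TOK_DESCDATABASE; "concat" and "my_db" are
    // placeholder names, and the DESCRIBE <table> form is covered by the first alternative
    // of the same rule):
    //   DESCRIBE FUNCTION EXTENDED concat   ->  ^( TOK_DESCFUNCTION concat KW_EXTENDED )
    //   DESC DATABASE EXTENDED my_db        ->  ^( TOK_DESCDATABASE my_db KW_EXTENDED )
    //   DESCRIBE SCHEMA my_db               ->  ^( TOK_DESCDATABASE my_db )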


    public static class analyzeStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "analyzeStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1322:1: analyzeStatement : KW_ANALYZE KW_TABLE (parttype= tableOrPartition ) KW_COMPUTE KW_STATISTICS ( (noscan= KW_NOSCAN ) | (partialscan= KW_PARTIALSCAN ) | ( KW_FOR KW_COLUMNS (statsColumnName= columnNameList )? ) )? -> ^( TOK_ANALYZE $parttype ( $noscan)? ( $partialscan)? ( KW_COLUMNS )? ( $statsColumnName)? ) ;
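    // Examples of ANALYZE statements matched by this rule (a sketch derived from the grammar
    // line above; "src", "ds", "key" and "value" are placeholder table/partition/column names):
    //   ANALYZE TABLE src COMPUTE STATISTICS
    //   ANALYZE TABLE src PARTITION (ds='2008-04-08') COMPUTE STATISTICS NOSCAN
    //   ANALYZE TABLE src COMPUTE STATISTICS PARTIALSCAN
    //   ANALYZE TABLE src COMPUTE STATISTICS FOR COLUMNS key, value
    // Each form rewrites to a ^( TOK_ANALYZE ... ) subtree as described by the -> pattern above.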
    public final HiveParser.analyzeStatement_return analyzeStatement() throws RecognitionException {
        HiveParser.analyzeStatement_return retval = new HiveParser.analyzeStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token noscan=null;
        Token partialscan=null;
        Token KW_ANALYZE381=null;
        Token KW_TABLE382=null;
        Token KW_COMPUTE383=null;
        Token KW_STATISTICS384=null;
        Token KW_FOR385=null;
        Token KW_COLUMNS386=null;
        HiveParser_IdentifiersParser.tableOrPartition_return parttype =null;

        HiveParser.columnNameList_return statsColumnName =null;


        CommonTree noscan_tree=null;
        CommonTree partialscan_tree=null;
        CommonTree KW_ANALYZE381_tree=null;
        CommonTree KW_TABLE382_tree=null;
        CommonTree KW_COMPUTE383_tree=null;
        CommonTree KW_STATISTICS384_tree=null;
        CommonTree KW_FOR385_tree=null;
        CommonTree KW_COLUMNS386_tree=null;
        RewriteRuleTokenStream stream_KW_ANALYZE=new RewriteRuleTokenStream(adaptor,"token KW_ANALYZE");
        RewriteRuleTokenStream stream_KW_NOSCAN=new RewriteRuleTokenStream(adaptor,"token KW_NOSCAN");
        RewriteRuleTokenStream stream_KW_COLUMNS=new RewriteRuleTokenStream(adaptor,"token KW_COLUMNS");
        RewriteRuleTokenStream stream_KW_STATISTICS=new RewriteRuleTokenStream(adaptor,"token KW_STATISTICS");
        RewriteRuleTokenStream stream_KW_FOR=new RewriteRuleTokenStream(adaptor,"token KW_FOR");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleTokenStream stream_KW_COMPUTE=new RewriteRuleTokenStream(adaptor,"token KW_COMPUTE");
        RewriteRuleTokenStream stream_KW_PARTIALSCAN=new RewriteRuleTokenStream(adaptor,"token KW_PARTIALSCAN");
        RewriteRuleSubtreeStream stream_tableOrPartition=new RewriteRuleSubtreeStream(adaptor,"rule tableOrPartition");
        RewriteRuleSubtreeStream stream_columnNameList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameList");
         pushMsg("analyze statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1325:5: ( KW_ANALYZE KW_TABLE (parttype= tableOrPartition ) KW_COMPUTE KW_STATISTICS ( (noscan= KW_NOSCAN ) | (partialscan= KW_PARTIALSCAN ) | ( KW_FOR KW_COLUMNS (statsColumnName= columnNameList )? ) )? -> ^( TOK_ANALYZE $parttype ( $noscan)? ( $partialscan)? ( KW_COLUMNS )? ( $statsColumnName)? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1325:7: KW_ANALYZE KW_TABLE (parttype= tableOrPartition ) KW_COMPUTE KW_STATISTICS ( (noscan= KW_NOSCAN ) | (partialscan= KW_PARTIALSCAN ) | ( KW_FOR KW_COLUMNS (statsColumnName= columnNameList )? ) )?
            {
            KW_ANALYZE381=(Token)match(input,KW_ANALYZE,FOLLOW_KW_ANALYZE_in_analyzeStatement6473);  
            stream_KW_ANALYZE.add(KW_ANALYZE381);


            KW_TABLE382=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_analyzeStatement6475);  
            stream_KW_TABLE.add(KW_TABLE382);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1325:27: (parttype= tableOrPartition )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1325:28: parttype= tableOrPartition
            {
            pushFollow(FOLLOW_tableOrPartition_in_analyzeStatement6480);
            parttype=tableOrPartition();

            state._fsp--;

            stream_tableOrPartition.add(parttype.getTree());

            }


            KW_COMPUTE383=(Token)match(input,KW_COMPUTE,FOLLOW_KW_COMPUTE_in_analyzeStatement6483);  
            stream_KW_COMPUTE.add(KW_COMPUTE383);


            KW_STATISTICS384=(Token)match(input,KW_STATISTICS,FOLLOW_KW_STATISTICS_in_analyzeStatement6485);  
            stream_KW_STATISTICS.add(KW_STATISTICS384);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1325:80: ( (noscan= KW_NOSCAN ) | (partialscan= KW_PARTIALSCAN ) | ( KW_FOR KW_COLUMNS (statsColumnName= columnNameList )? ) )?
            int alt109=4;
            switch ( input.LA(1) ) {
                case KW_NOSCAN:
                    {
                    alt109=1;
                    }
                    break;
                case KW_PARTIALSCAN:
                    {
                    alt109=2;
                    }
                    break;
                case KW_FOR:
                    {
                    alt109=3;
                    }
                    break;
            }

            switch (alt109) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1325:81: (noscan= KW_NOSCAN )
                    {
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1325:81: (noscan= KW_NOSCAN )
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1325:82: noscan= KW_NOSCAN
                    {
                    noscan=(Token)match(input,KW_NOSCAN,FOLLOW_KW_NOSCAN_in_analyzeStatement6491);  
                    stream_KW_NOSCAN.add(noscan);


                    }


                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1325:102: (partialscan= KW_PARTIALSCAN )
                    {
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1325:102: (partialscan= KW_PARTIALSCAN )
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1325:103: partialscan= KW_PARTIALSCAN
                    {
                    partialscan=(Token)match(input,KW_PARTIALSCAN,FOLLOW_KW_PARTIALSCAN_in_analyzeStatement6499);  
                    stream_KW_PARTIALSCAN.add(partialscan);


                    }


                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1326:57: ( KW_FOR KW_COLUMNS (statsColumnName= columnNameList )? )
                    {
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1326:57: ( KW_FOR KW_COLUMNS (statsColumnName= columnNameList )? )
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1326:58: KW_FOR KW_COLUMNS (statsColumnName= columnNameList )?
                    {
                    KW_FOR385=(Token)match(input,KW_FOR,FOLLOW_KW_FOR_in_analyzeStatement6560);  
                    stream_KW_FOR.add(KW_FOR385);


                    KW_COLUMNS386=(Token)match(input,KW_COLUMNS,FOLLOW_KW_COLUMNS_in_analyzeStatement6562);  
                    stream_KW_COLUMNS.add(KW_COLUMNS386);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1326:76: (statsColumnName= columnNameList )?
                    int alt108=2;
                    switch ( input.LA(1) ) {
                        case Identifier:
                        case KW_ADD:
                        case KW_ADMIN:
                        case KW_AFTER:
                        case KW_ALL:
                        case KW_ALTER:
                        case KW_ANALYZE:
                        case KW_ARCHIVE:
                        case KW_ARRAY:
                        case KW_AS:
                        case KW_ASC:
                        case KW_AUTHORIZATION:
                        case KW_BEFORE:
                        case KW_BETWEEN:
                        case KW_BIGINT:
                        case KW_BINARY:
                        case KW_BOOLEAN:
                        case KW_BOTH:
                        case KW_BUCKET:
                        case KW_BUCKETS:
                        case KW_BY:
                        case KW_CASCADE:
                        case KW_CHANGE:
                        case KW_CLUSTER:
                        case KW_CLUSTERED:
                        case KW_CLUSTERSTATUS:
                        case KW_COLLECTION:
                        case KW_COLUMNS:
                        case KW_COMMENT:
                        case KW_COMPACT:
                        case KW_COMPACTIONS:
                        case KW_COMPUTE:
                        case KW_CONCATENATE:
                        case KW_CONTINUE:
                        case KW_CREATE:
                        case KW_CUBE:
                        case KW_CURSOR:
                        case KW_DATA:
                        case KW_DATABASES:
                        case KW_DATE:
                        case KW_DATETIME:
                        case KW_DBPROPERTIES:
                        case KW_DECIMAL:
                        case KW_DEFAULT:
                        case KW_DEFERRED:
                        case KW_DEFINED:
                        case KW_DELETE:
                        case KW_DELIMITED:
                        case KW_DEPENDENCY:
                        case KW_DESC:
                        case KW_DESCRIBE:
                        case KW_DIRECTORIES:
                        case KW_DIRECTORY:
                        case KW_DISABLE:
                        case KW_DISTRIBUTE:
                        case KW_DOUBLE:
                        case KW_DROP:
                        case KW_ELEM_TYPE:
                        case KW_ENABLE:
                        case KW_ESCAPED:
                        case KW_EXCLUSIVE:
                        case KW_EXISTS:
                        case KW_EXPLAIN:
                        case KW_EXPORT:
                        case KW_EXTERNAL:
                        case KW_FALSE:
                        case KW_FETCH:
                        case KW_FIELDS:
                        case KW_FILE:
                        case KW_FILEFORMAT:
                        case KW_FIRST:
                        case KW_FLOAT:
                        case KW_FOR:
                        case KW_FORMAT:
                        case KW_FORMATTED:
                        case KW_FULL:
                        case KW_FUNCTIONS:
                        case KW_GRANT:
                        case KW_GROUP:
                        case KW_GROUPING:
                        case KW_HOLD_DDLTIME:
                        case KW_IDXPROPERTIES:
                        case KW_IGNORE:
                        case KW_IMPORT:
                        case KW_IN:
                        case KW_INDEX:
                        case KW_INDEXES:
                        case KW_INNER:
                        case KW_INPATH:
                        case KW_INPUTDRIVER:
                        case KW_INPUTFORMAT:
                        case KW_INSERT:
                        case KW_INT:
                        case KW_INTERSECT:
                        case KW_INTO:
                        case KW_IS:
                        case KW_ITEMS:
                        case KW_JAR:
                        case KW_KEYS:
                        case KW_KEY_TYPE:
                        case KW_LATERAL:
                        case KW_LEFT:
                        case KW_LIKE:
                        case KW_LIMIT:
                        case KW_LINES:
                        case KW_LOAD:
                        case KW_LOCAL:
                        case KW_LOCATION:
                        case KW_LOCK:
                        case KW_LOCKS:
                        case KW_LOGICAL:
                        case KW_LONG:
                        case KW_MAPJOIN:
                        case KW_MATERIALIZED:
                        case KW_MINUS:
                        case KW_MSCK:
                        case KW_NONE:
                        case KW_NOSCAN:
                        case KW_NO_DROP:
                        case KW_NULL:
                        case KW_OF:
                        case KW_OFFLINE:
                        case KW_OPTION:
                        case KW_ORDER:
                        case KW_OUT:
                        case KW_OUTER:
                        case KW_OUTPUTDRIVER:
                        case KW_OUTPUTFORMAT:
                        case KW_OVERWRITE:
                        case KW_OWNER:
                        case KW_PARTITION:
                        case KW_PARTITIONED:
                        case KW_PARTITIONS:
                        case KW_PERCENT:
                        case KW_PLUS:
                        case KW_PRETTY:
                        case KW_PRINCIPALS:
                        case KW_PROCEDURE:
                        case KW_PROTECTION:
                        case KW_PURGE:
                        case KW_RANGE:
                        case KW_READ:
                        case KW_READONLY:
                        case KW_READS:
                        case KW_REBUILD:
                        case KW_RECORDREADER:
                        case KW_RECORDWRITER:
                        case KW_REGEXP:
                        case KW_RENAME:
                        case KW_REPAIR:
                        case KW_REPLACE:
                        case KW_RESTRICT:
                        case KW_REVOKE:
                        case KW_REWRITE:
                        case KW_RIGHT:
                        case KW_RLIKE:
                        case KW_ROLE:
                        case KW_ROLES:
                        case KW_ROLLUP:
                        case KW_ROW:
                        case KW_ROWS:
                        case KW_SCHEMA:
                        case KW_SCHEMAS:
                        case KW_SEMI:
                        case KW_SERDE:
                        case KW_SERDEPROPERTIES:
                        case KW_SET:
                        case KW_SETS:
                        case KW_SHARED:
                        case KW_SHOW:
                        case KW_SHOW_DATABASE:
                        case KW_SKEWED:
                        case KW_SMALLINT:
                        case KW_SORT:
                        case KW_SORTED:
                        case KW_SSL:
                        case KW_STATISTICS:
                        case KW_STORED:
                        case KW_STREAMTABLE:
                        case KW_STRING:
                        case KW_STRUCT:
                        case KW_TABLE:
                        case KW_TABLES:
                        case KW_TBLPROPERTIES:
                        case KW_TEMPORARY:
                        case KW_TERMINATED:
                        case KW_TIMESTAMP:
                        case KW_TINYINT:
                        case KW_TO:
                        case KW_TOUCH:
                        case KW_TRANSACTIONS:
                        case KW_TRIGGER:
                        case KW_TRUE:
                        case KW_TRUNCATE:
                        case KW_UNARCHIVE:
                        case KW_UNDO:
                        case KW_UNION:
                        case KW_UNIONTYPE:
                        case KW_UNLOCK:
                        case KW_UNSET:
                        case KW_UNSIGNED:
                        case KW_UPDATE:
                        case KW_USE:
                        case KW_USER:
                        case KW_USING:
                        case KW_UTC:
                        case KW_UTCTIMESTAMP:
                        case KW_VALUES:
                        case KW_VALUE_TYPE:
                        case KW_VIEW:
                        case KW_WHILE:
                        case KW_WITH:
                            {
                            alt108=1;
                            }
                            break;
                    }

                    switch (alt108) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1326:77: statsColumnName= columnNameList
                            {
                            pushFollow(FOLLOW_columnNameList_in_analyzeStatement6567);
                            statsColumnName=columnNameList();

                            state._fsp--;

                            stream_columnNameList.add(statsColumnName.getTree());

                            }
                            break;

                    }


                    }


                    }
                    break;

            }


            // AST REWRITE
            // elements: noscan, statsColumnName, KW_COLUMNS, partialscan, parttype
            // token labels: partialscan, noscan
            // rule labels: retval, parttype, statsColumnName
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_partialscan=new RewriteRuleTokenStream(adaptor,"token partialscan",partialscan);
            RewriteRuleTokenStream stream_noscan=new RewriteRuleTokenStream(adaptor,"token noscan",noscan);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_parttype=new RewriteRuleSubtreeStream(adaptor,"rule parttype",parttype!=null?parttype.tree:null);
            RewriteRuleSubtreeStream stream_statsColumnName=new RewriteRuleSubtreeStream(adaptor,"rule statsColumnName",statsColumnName!=null?statsColumnName.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1327:7: -> ^( TOK_ANALYZE $parttype ( $noscan)? ( $partialscan)? ( KW_COLUMNS )? ( $statsColumnName)? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1327:10: ^( TOK_ANALYZE $parttype ( $noscan)? ( $partialscan)? ( KW_COLUMNS )? ( $statsColumnName)? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ANALYZE, "TOK_ANALYZE")
                , root_1);

                adaptor.addChild(root_1, stream_parttype.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1327:35: ( $noscan)?
                if ( stream_noscan.hasNext() ) {
                    adaptor.addChild(root_1, stream_noscan.nextNode());

                }
                stream_noscan.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1327:44: ( $partialscan)?
                if ( stream_partialscan.hasNext() ) {
                    adaptor.addChild(root_1, stream_partialscan.nextNode());

                }
                stream_partialscan.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1327:57: ( KW_COLUMNS )?
                if ( stream_KW_COLUMNS.hasNext() ) {
                    adaptor.addChild(root_1, 
                    stream_KW_COLUMNS.nextNode()
                    );

                }
                stream_KW_COLUMNS.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1327:70: ( $statsColumnName)?
                if ( stream_statsColumnName.hasNext() ) {
                    adaptor.addChild(root_1, stream_statsColumnName.nextTree());

                }
                stream_statsColumnName.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
            // cleanup to perform before leaving the rule (none needed here)
        }
        return retval;
    }
    // $ANTLR end "analyzeStatement"
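
    // Usage sketch (an illustration only, not part of the generated code): these rules are
    // normally reached through the top-level statement() rule. Hive's own ParseDriver adds
    // extra plumbing (a case-insensitive character stream and a custom tree adaptor), so with
    // only the plain ANTLR 3 runtime an upper-case statement can be parsed roughly like this:
    //
    //   HiveLexer lexer = new HiveLexer(new ANTLRStringStream("SHOW DATABASES"));
    //   HiveParser parser = new HiveParser(new CommonTokenStream(lexer));
    //   CommonTree ast = (CommonTree) parser.statement().getTree();
    //
    // The returned tree contains the TOK_* nodes built by the rewrites in rules such as
    // analyzeStatement and showStatement.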


    public static class showStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "showStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1330:1: showStatement : ( KW_SHOW ( KW_DATABASES | KW_SCHEMAS ) ( KW_LIKE showStmtIdentifier )? -> ^( TOK_SHOWDATABASES ( showStmtIdentifier )? ) | KW_SHOW KW_TABLES ( ( KW_FROM | KW_IN ) db_name= identifier )? ( KW_LIKE showStmtIdentifier | showStmtIdentifier )? -> ^( TOK_SHOWTABLES ( TOK_FROM $db_name)? ( showStmtIdentifier )? ) | KW_SHOW KW_COLUMNS ( KW_FROM | KW_IN ) tableName ( ( KW_FROM | KW_IN ) db_name= identifier )? -> ^( TOK_SHOWCOLUMNS tableName ( $db_name)? ) | KW_SHOW KW_FUNCTIONS ( showFunctionIdentifier )? -> ^( TOK_SHOWFUNCTIONS ( showFunctionIdentifier )? ) | KW_SHOW KW_PARTITIONS tabName= tableName ( partitionSpec )? -> ^( TOK_SHOWPARTITIONS $tabName ( partitionSpec )? ) | KW_SHOW KW_CREATE KW_TABLE tabName= tableName -> ^( TOK_SHOW_CREATETABLE $tabName) | KW_SHOW KW_TABLE KW_EXTENDED ( ( KW_FROM | KW_IN ) db_name= identifier )? KW_LIKE showStmtIdentifier ( partitionSpec )? -> ^( TOK_SHOW_TABLESTATUS showStmtIdentifier ( $db_name)? ( partitionSpec )? ) | KW_SHOW KW_TBLPROPERTIES tableName ( LPAREN prptyName= StringLiteral RPAREN )? -> ^( TOK_SHOW_TBLPROPERTIES tableName ( $prptyName)? ) | KW_SHOW KW_LOCKS (parttype= partTypeExpr )? (isExtended= KW_EXTENDED )? -> ^( TOK_SHOWLOCKS ( $parttype)? ( $isExtended)? ) | KW_SHOW KW_LOCKS ( KW_DATABASE | KW_SCHEMA ) (dbName= Identifier ) (isExtended= KW_EXTENDED )? -> ^( TOK_SHOWDBLOCKS $dbName ( $isExtended)? ) | KW_SHOW (showOptions= KW_FORMATTED )? ( KW_INDEX | KW_INDEXES ) KW_ON showStmtIdentifier ( ( KW_FROM | KW_IN ) db_name= identifier )? -> ^( TOK_SHOWINDEXES showStmtIdentifier ( $showOptions)? ( $db_name)? ) | KW_SHOW KW_COMPACTIONS -> ^( TOK_SHOW_COMPACTIONS ) | KW_SHOW KW_TRANSACTIONS -> ^( TOK_SHOW_TRANSACTIONS ) | KW_SHOW KW_CONF StringLiteral -> ^( TOK_SHOWCONF StringLiteral ) );
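    // Examples of SHOW statements matched by this rule (a sketch derived from the grammar line
    // above; "src", "my_db" and the quoted literals are placeholders):
    //   SHOW DATABASES LIKE 'prod*'
    //   SHOW TABLES IN my_db 'src*'
    //   SHOW COLUMNS FROM src IN my_db
    //   SHOW FUNCTIONS
    //   SHOW PARTITIONS src PARTITION (ds='2008-04-08')
    //   SHOW CREATE TABLE src
    //   SHOW TABLE EXTENDED IN my_db LIKE 'src*'
    //   SHOW TBLPROPERTIES src ('comment')
    //   SHOW LOCKS DATABASE my_db EXTENDED
    //   SHOW FORMATTED INDEXES ON src FROM my_db
    //   SHOW COMPACTIONS
    //   SHOW TRANSACTIONS
    //   SHOW CONF 'hive.exec.dynamic.partition.mode'
    // Each alternative rewrites to the corresponding ^( TOK_SHOW... ) subtree in the -> patterns above.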
    public final HiveParser.showStatement_return showStatement() throws RecognitionException {
        HiveParser.showStatement_return retval = new HiveParser.showStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token prptyName=null;
        Token isExtended=null;
        Token dbName=null;
        Token showOptions=null;
        Token KW_SHOW387=null;
        Token KW_DATABASES388=null;
        Token KW_SCHEMAS389=null;
        Token KW_LIKE390=null;
        Token KW_SHOW392=null;
        Token KW_TABLES393=null;
        Token KW_FROM394=null;
        Token KW_IN395=null;
        Token KW_LIKE396=null;
        Token KW_SHOW399=null;
        Token KW_COLUMNS400=null;
        Token KW_FROM401=null;
        Token KW_IN402=null;
        Token KW_FROM404=null;
        Token KW_IN405=null;
        Token KW_SHOW406=null;
        Token KW_FUNCTIONS407=null;
        Token KW_SHOW409=null;
        Token KW_PARTITIONS410=null;
        Token KW_SHOW412=null;
        Token KW_CREATE413=null;
        Token KW_TABLE414=null;
        Token KW_SHOW415=null;
        Token KW_TABLE416=null;
        Token KW_EXTENDED417=null;
        Token KW_FROM418=null;
        Token KW_IN419=null;
        Token KW_LIKE420=null;
        Token KW_SHOW423=null;
        Token KW_TBLPROPERTIES424=null;
        Token LPAREN426=null;
        Token RPAREN427=null;
        Token KW_SHOW428=null;
        Token KW_LOCKS429=null;
        Token KW_SHOW430=null;
        Token KW_LOCKS431=null;
        Token KW_DATABASE432=null;
        Token KW_SCHEMA433=null;
        Token KW_SHOW434=null;
        Token KW_INDEX435=null;
        Token KW_INDEXES436=null;
        Token KW_ON437=null;
        Token KW_FROM439=null;
        Token KW_IN440=null;
        Token KW_SHOW441=null;
        Token KW_COMPACTIONS442=null;
        Token KW_SHOW443=null;
        Token KW_TRANSACTIONS444=null;
        Token KW_SHOW445=null;
        Token KW_CONF446=null;
        Token StringLiteral447=null;
        HiveParser_IdentifiersParser.identifier_return db_name =null;

        HiveParser_FromClauseParser.tableName_return tabName =null;

        HiveParser.partTypeExpr_return parttype =null;

        HiveParser.showStmtIdentifier_return showStmtIdentifier391 =null;

        HiveParser.showStmtIdentifier_return showStmtIdentifier397 =null;

        HiveParser.showStmtIdentifier_return showStmtIdentifier398 =null;

        HiveParser_FromClauseParser.tableName_return tableName403 =null;

        HiveParser.showFunctionIdentifier_return showFunctionIdentifier408 =null;

        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec411 =null;

        HiveParser.showStmtIdentifier_return showStmtIdentifier421 =null;

        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec422 =null;

        HiveParser_FromClauseParser.tableName_return tableName425 =null;

        HiveParser.showStmtIdentifier_return showStmtIdentifier438 =null;


        CommonTree prptyName_tree=null;
        CommonTree isExtended_tree=null;
        CommonTree dbName_tree=null;
        CommonTree showOptions_tree=null;
        CommonTree KW_SHOW387_tree=null;
        CommonTree KW_DATABASES388_tree=null;
        CommonTree KW_SCHEMAS389_tree=null;
        CommonTree KW_LIKE390_tree=null;
        CommonTree KW_SHOW392_tree=null;
        CommonTree KW_TABLES393_tree=null;
        CommonTree KW_FROM394_tree=null;
        CommonTree KW_IN395_tree=null;
        CommonTree KW_LIKE396_tree=null;
        CommonTree KW_SHOW399_tree=null;
        CommonTree KW_COLUMNS400_tree=null;
        CommonTree KW_FROM401_tree=null;
        CommonTree KW_IN402_tree=null;
        CommonTree KW_FROM404_tree=null;
        CommonTree KW_IN405_tree=null;
        CommonTree KW_SHOW406_tree=null;
        CommonTree KW_FUNCTIONS407_tree=null;
        CommonTree KW_SHOW409_tree=null;
        CommonTree KW_PARTITIONS410_tree=null;
        CommonTree KW_SHOW412_tree=null;
        CommonTree KW_CREATE413_tree=null;
        CommonTree KW_TABLE414_tree=null;
        CommonTree KW_SHOW415_tree=null;
        CommonTree KW_TABLE416_tree=null;
        CommonTree KW_EXTENDED417_tree=null;
        CommonTree KW_FROM418_tree=null;
        CommonTree KW_IN419_tree=null;
        CommonTree KW_LIKE420_tree=null;
        CommonTree KW_SHOW423_tree=null;
        CommonTree KW_TBLPROPERTIES424_tree=null;
        CommonTree LPAREN426_tree=null;
        CommonTree RPAREN427_tree=null;
        CommonTree KW_SHOW428_tree=null;
        CommonTree KW_LOCKS429_tree=null;
        CommonTree KW_SHOW430_tree=null;
        CommonTree KW_LOCKS431_tree=null;
        CommonTree KW_DATABASE432_tree=null;
        CommonTree KW_SCHEMA433_tree=null;
        CommonTree KW_SHOW434_tree=null;
        CommonTree KW_INDEX435_tree=null;
        CommonTree KW_INDEXES436_tree=null;
        CommonTree KW_ON437_tree=null;
        CommonTree KW_FROM439_tree=null;
        CommonTree KW_IN440_tree=null;
        CommonTree KW_SHOW441_tree=null;
        CommonTree KW_COMPACTIONS442_tree=null;
        CommonTree KW_SHOW443_tree=null;
        CommonTree KW_TRANSACTIONS444_tree=null;
        CommonTree KW_SHOW445_tree=null;
        CommonTree KW_CONF446_tree=null;
        CommonTree StringLiteral447_tree=null;
        RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
        RewriteRuleTokenStream stream_KW_LIKE=new RewriteRuleTokenStream(adaptor,"token KW_LIKE");
        RewriteRuleTokenStream stream_KW_COLUMNS=new RewriteRuleTokenStream(adaptor,"token KW_COLUMNS");
        RewriteRuleTokenStream stream_KW_TRANSACTIONS=new RewriteRuleTokenStream(adaptor,"token KW_TRANSACTIONS");
        RewriteRuleTokenStream stream_KW_FUNCTIONS=new RewriteRuleTokenStream(adaptor,"token KW_FUNCTIONS");
        RewriteRuleTokenStream stream_KW_CONF=new RewriteRuleTokenStream(adaptor,"token KW_CONF");
        RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
        RewriteRuleTokenStream stream_KW_INDEXES=new RewriteRuleTokenStream(adaptor,"token KW_INDEXES");
        RewriteRuleTokenStream stream_Identifier=new RewriteRuleTokenStream(adaptor,"token Identifier");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleTokenStream stream_KW_FROM=new RewriteRuleTokenStream(adaptor,"token KW_FROM");
        RewriteRuleTokenStream stream_KW_TBLPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_TBLPROPERTIES");
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_SHOW=new RewriteRuleTokenStream(adaptor,"token KW_SHOW");
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_KW_LOCKS=new RewriteRuleTokenStream(adaptor,"token KW_LOCKS");
        RewriteRuleTokenStream stream_KW_FORMATTED=new RewriteRuleTokenStream(adaptor,"token KW_FORMATTED");
        RewriteRuleTokenStream stream_KW_PARTITIONS=new RewriteRuleTokenStream(adaptor,"token KW_PARTITIONS");
        RewriteRuleTokenStream stream_KW_ON=new RewriteRuleTokenStream(adaptor,"token KW_ON");
        RewriteRuleTokenStream stream_KW_IN=new RewriteRuleTokenStream(adaptor,"token KW_IN");
        RewriteRuleTokenStream stream_KW_COMPACTIONS=new RewriteRuleTokenStream(adaptor,"token KW_COMPACTIONS");
        RewriteRuleTokenStream stream_KW_SCHEMAS=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMAS");
        RewriteRuleTokenStream stream_KW_TABLES=new RewriteRuleTokenStream(adaptor,"token KW_TABLES");
        RewriteRuleTokenStream stream_KW_INDEX=new RewriteRuleTokenStream(adaptor,"token KW_INDEX");
        RewriteRuleTokenStream stream_KW_EXTENDED=new RewriteRuleTokenStream(adaptor,"token KW_EXTENDED");
        RewriteRuleTokenStream stream_KW_DATABASES=new RewriteRuleTokenStream(adaptor,"token KW_DATABASES");
        RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleSubtreeStream stream_showStmtIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule showStmtIdentifier");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
        RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
        RewriteRuleSubtreeStream stream_showFunctionIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule showFunctionIdentifier");
        RewriteRuleSubtreeStream stream_partTypeExpr=new RewriteRuleSubtreeStream(adaptor,"rule partTypeExpr");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("show statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1333:5: ( KW_SHOW ( KW_DATABASES | KW_SCHEMAS ) ( KW_LIKE showStmtIdentifier )? -> ^( TOK_SHOWDATABASES ( showStmtIdentifier )? ) | KW_SHOW KW_TABLES ( ( KW_FROM | KW_IN ) db_name= identifier )? ( KW_LIKE showStmtIdentifier | showStmtIdentifier )? -> ^( TOK_SHOWTABLES ( TOK_FROM $db_name)? ( showStmtIdentifier )? ) | KW_SHOW KW_COLUMNS ( KW_FROM | KW_IN ) tableName ( ( KW_FROM | KW_IN ) db_name= identifier )? -> ^( TOK_SHOWCOLUMNS tableName ( $db_name)? ) | KW_SHOW KW_FUNCTIONS ( showFunctionIdentifier )? -> ^( TOK_SHOWFUNCTIONS ( showFunctionIdentifier )? ) | KW_SHOW KW_PARTITIONS tabName= tableName ( partitionSpec )? -> ^( TOK_SHOWPARTITIONS $tabName ( partitionSpec )? ) | KW_SHOW KW_CREATE KW_TABLE tabName= tableName -> ^( TOK_SHOW_CREATETABLE $tabName) | KW_SHOW KW_TABLE KW_EXTENDED ( ( KW_FROM | KW_IN ) db_name= identifier )? KW_LIKE showStmtIdentifier ( partitionSpec )? -> ^( TOK_SHOW_TABLESTATUS showStmtIdentifier ( $db_name)? ( partitionSpec )? ) | KW_SHOW KW_TBLPROPERTIES tableName ( LPAREN prptyName= StringLiteral RPAREN )? -> ^( TOK_SHOW_TBLPROPERTIES tableName ( $prptyName)? ) | KW_SHOW KW_LOCKS (parttype= partTypeExpr )? (isExtended= KW_EXTENDED )? -> ^( TOK_SHOWLOCKS ( $parttype)? ( $isExtended)? ) | KW_SHOW KW_LOCKS ( KW_DATABASE | KW_SCHEMA ) (dbName= Identifier ) (isExtended= KW_EXTENDED )? -> ^( TOK_SHOWDBLOCKS $dbName ( $isExtended)? ) | KW_SHOW (showOptions= KW_FORMATTED )? ( KW_INDEX | KW_INDEXES ) KW_ON showStmtIdentifier ( ( KW_FROM | KW_IN ) db_name= identifier )? -> ^( TOK_SHOWINDEXES showStmtIdentifier ( $showOptions)? ( $db_name)? ) | KW_SHOW KW_COMPACTIONS -> ^( TOK_SHOW_COMPACTIONS ) | KW_SHOW KW_TRANSACTIONS -> ^( TOK_SHOW_TRANSACTIONS ) | KW_SHOW KW_CONF StringLiteral -> ^( TOK_SHOWCONF StringLiteral ) )
            int alt132=14;
            switch ( input.LA(1) ) {
            case KW_SHOW:
                {
                switch ( input.LA(2) ) {
                case KW_TABLES:
                    {
                    alt132=2;
                    }
                    break;
                case KW_COLUMNS:
                    {
                    alt132=3;
                    }
                    break;
                case KW_FUNCTIONS:
                    {
                    alt132=4;
                    }
                    break;
                case KW_PARTITIONS:
                    {
                    alt132=5;
                    }
                    break;
                case KW_CREATE:
                    {
                    alt132=6;
                    }
                    break;
                case KW_TABLE:
                    {
                    alt132=7;
                    }
                    break;
                case KW_TBLPROPERTIES:
                    {
                    alt132=8;
                    }
                    break;
                case KW_LOCKS:
                    {
                    switch ( input.LA(3) ) {
                    case EOF:
                    case Identifier:
                    case KW_ADD:
                    case KW_ADMIN:
                    case KW_AFTER:
                    case KW_ALL:
                    case KW_ALTER:
                    case KW_ANALYZE:
                    case KW_ARCHIVE:
                    case KW_ARRAY:
                    case KW_AS:
                    case KW_ASC:
                    case KW_AUTHORIZATION:
                    case KW_BEFORE:
                    case KW_BETWEEN:
                    case KW_BIGINT:
                    case KW_BINARY:
                    case KW_BOOLEAN:
                    case KW_BOTH:
                    case KW_BUCKET:
                    case KW_BUCKETS:
                    case KW_BY:
                    case KW_CASCADE:
                    case KW_CHANGE:
                    case KW_CLUSTER:
                    case KW_CLUSTERED:
                    case KW_CLUSTERSTATUS:
                    case KW_COLLECTION:
                    case KW_COLUMNS:
                    case KW_COMMENT:
                    case KW_COMPACT:
                    case KW_COMPACTIONS:
                    case KW_COMPUTE:
                    case KW_CONCATENATE:
                    case KW_CONTINUE:
                    case KW_CREATE:
                    case KW_CUBE:
                    case KW_CURSOR:
                    case KW_DATA:
                    case KW_DATABASES:
                    case KW_DATE:
                    case KW_DATETIME:
                    case KW_DBPROPERTIES:
                    case KW_DECIMAL:
                    case KW_DEFAULT:
                    case KW_DEFERRED:
                    case KW_DEFINED:
                    case KW_DELETE:
                    case KW_DELIMITED:
                    case KW_DEPENDENCY:
                    case KW_DESC:
                    case KW_DESCRIBE:
                    case KW_DIRECTORIES:
                    case KW_DIRECTORY:
                    case KW_DISABLE:
                    case KW_DISTRIBUTE:
                    case KW_DOUBLE:
                    case KW_DROP:
                    case KW_ELEM_TYPE:
                    case KW_ENABLE:
                    case KW_ESCAPED:
                    case KW_EXCLUSIVE:
                    case KW_EXISTS:
                    case KW_EXPLAIN:
                    case KW_EXPORT:
                    case KW_EXTENDED:
                    case KW_EXTERNAL:
                    case KW_FALSE:
                    case KW_FETCH:
                    case KW_FIELDS:
                    case KW_FILE:
                    case KW_FILEFORMAT:
                    case KW_FIRST:
                    case KW_FLOAT:
                    case KW_FOR:
                    case KW_FORMAT:
                    case KW_FORMATTED:
                    case KW_FULL:
                    case KW_FUNCTIONS:
                    case KW_GRANT:
                    case KW_GROUP:
                    case KW_GROUPING:
                    case KW_HOLD_DDLTIME:
                    case KW_IDXPROPERTIES:
                    case KW_IGNORE:
                    case KW_IMPORT:
                    case KW_IN:
                    case KW_INDEX:
                    case KW_INDEXES:
                    case KW_INNER:
                    case KW_INPATH:
                    case KW_INPUTDRIVER:
                    case KW_INPUTFORMAT:
                    case KW_INSERT:
                    case KW_INT:
                    case KW_INTERSECT:
                    case KW_INTO:
                    case KW_IS:
                    case KW_ITEMS:
                    case KW_JAR:
                    case KW_KEYS:
                    case KW_KEY_TYPE:
                    case KW_LATERAL:
                    case KW_LEFT:
                    case KW_LIKE:
                    case KW_LIMIT:
                    case KW_LINES:
                    case KW_LOAD:
                    case KW_LOCAL:
                    case KW_LOCATION:
                    case KW_LOCK:
                    case KW_LOCKS:
                    case KW_LOGICAL:
                    case KW_LONG:
                    case KW_MAPJOIN:
                    case KW_MATERIALIZED:
                    case KW_MINUS:
                    case KW_MSCK:
                    case KW_NONE:
                    case KW_NOSCAN:
                    case KW_NO_DROP:
                    case KW_NULL:
                    case KW_OF:
                    case KW_OFFLINE:
                    case KW_OPTION:
                    case KW_ORDER:
                    case KW_OUT:
                    case KW_OUTER:
                    case KW_OUTPUTDRIVER:
                    case KW_OUTPUTFORMAT:
                    case KW_OVERWRITE:
                    case KW_OWNER:
                    case KW_PARTITION:
                    case KW_PARTITIONED:
                    case KW_PARTITIONS:
                    case KW_PERCENT:
                    case KW_PLUS:
                    case KW_PRETTY:
                    case KW_PRINCIPALS:
                    case KW_PROCEDURE:
                    case KW_PROTECTION:
                    case KW_PURGE:
                    case KW_RANGE:
                    case KW_READ:
                    case KW_READONLY:
                    case KW_READS:
                    case KW_REBUILD:
                    case KW_RECORDREADER:
                    case KW_RECORDWRITER:
                    case KW_REGEXP:
                    case KW_RENAME:
                    case KW_REPAIR:
                    case KW_REPLACE:
                    case KW_RESTRICT:
                    case KW_REVOKE:
                    case KW_REWRITE:
                    case KW_RIGHT:
                    case KW_RLIKE:
                    case KW_ROLE:
                    case KW_ROLES:
                    case KW_ROLLUP:
                    case KW_ROW:
                    case KW_ROWS:
                    case KW_SCHEMAS:
                    case KW_SEMI:
                    case KW_SERDE:
                    case KW_SERDEPROPERTIES:
                    case KW_SET:
                    case KW_SETS:
                    case KW_SHARED:
                    case KW_SHOW:
                    case KW_SHOW_DATABASE:
                    case KW_SKEWED:
                    case KW_SMALLINT:
                    case KW_SORT:
                    case KW_SORTED:
                    case KW_SSL:
                    case KW_STATISTICS:
                    case KW_STORED:
                    case KW_STREAMTABLE:
                    case KW_STRING:
                    case KW_STRUCT:
                    case KW_TABLE:
                    case KW_TABLES:
                    case KW_TBLPROPERTIES:
                    case KW_TEMPORARY:
                    case KW_TERMINATED:
                    case KW_TIMESTAMP:
                    case KW_TINYINT:
                    case KW_TO:
                    case KW_TOUCH:
                    case KW_TRANSACTIONS:
                    case KW_TRIGGER:
                    case KW_TRUE:
                    case KW_TRUNCATE:
                    case KW_UNARCHIVE:
                    case KW_UNDO:
                    case KW_UNION:
                    case KW_UNIONTYPE:
                    case KW_UNLOCK:
                    case KW_UNSET:
                    case KW_UNSIGNED:
                    case KW_UPDATE:
                    case KW_USE:
                    case KW_USER:
                    case KW_USING:
                    case KW_UTC:
                    case KW_UTCTIMESTAMP:
                    case KW_VALUES:
                    case KW_VALUE_TYPE:
                    case KW_VIEW:
                    case KW_WHILE:
                    case KW_WITH:
                        {
                        alt132=9;
                        }
                        break;
                    case KW_SCHEMA:
                        {
                        alt132=9;
                        }
                        break;
                    case KW_DATABASE:
                        {
                        alt132=10;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 132, 9, input);

                        throw nvae;

                    }

                    }
                    break;
                case KW_COMPACTIONS:
                    {
                    alt132=12;
                    }
                    break;
                case KW_TRANSACTIONS:
                    {
                    alt132=13;
                    }
                    break;
                case KW_CONF:
                    {
                    alt132=14;
                    }
                    break;
                case KW_DATABASES:
                case KW_SCHEMAS:
                    {
                    alt132=1;
                    }
                    break;
                case KW_FORMATTED:
                case KW_INDEX:
                case KW_INDEXES:
                    {
                    alt132=11;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 132, 1, input);

                    throw nvae;

                }

                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 132, 0, input);

                throw nvae;

            }

            switch (alt132) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1333:7: KW_SHOW ( KW_DATABASES | KW_SCHEMAS ) ( KW_LIKE showStmtIdentifier )?
                    {
                    KW_SHOW387=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement6629);  
                    stream_KW_SHOW.add(KW_SHOW387);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1333:15: ( KW_DATABASES | KW_SCHEMAS )
                    int alt110=2;
                    switch ( input.LA(1) ) {
                    case KW_DATABASES:
                        {
                        alt110=1;
                        }
                        break;
                    case KW_SCHEMAS:
                        {
                        alt110=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 110, 0, input);

                        throw nvae;

                    }

                    switch (alt110) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1333:16: KW_DATABASES
                            {
                            KW_DATABASES388=(Token)match(input,KW_DATABASES,FOLLOW_KW_DATABASES_in_showStatement6632);  
                            stream_KW_DATABASES.add(KW_DATABASES388);


                            }
                            break;
                        case 2 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1333:29: KW_SCHEMAS
                            {
                            KW_SCHEMAS389=(Token)match(input,KW_SCHEMAS,FOLLOW_KW_SCHEMAS_in_showStatement6634);  
                            stream_KW_SCHEMAS.add(KW_SCHEMAS389);


                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1333:41: ( KW_LIKE showStmtIdentifier )?
                    int alt111=2;
                    switch ( input.LA(1) ) {
                        case KW_LIKE:
                            {
                            alt111=1;
                            }
                            break;
                    }

                    switch (alt111) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1333:42: KW_LIKE showStmtIdentifier
                            {
                            KW_LIKE390=(Token)match(input,KW_LIKE,FOLLOW_KW_LIKE_in_showStatement6638);  
                            stream_KW_LIKE.add(KW_LIKE390);


                            pushFollow(FOLLOW_showStmtIdentifier_in_showStatement6640);
                            showStmtIdentifier391=showStmtIdentifier();

                            state._fsp--;

                            stream_showStmtIdentifier.add(showStmtIdentifier391.getTree());

                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: showStmtIdentifier
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1333:71: -> ^( TOK_SHOWDATABASES ( showStmtIdentifier )? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1333:74: ^( TOK_SHOWDATABASES ( showStmtIdentifier )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_SHOWDATABASES, "TOK_SHOWDATABASES")
                        , root_1);

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1333:94: ( showStmtIdentifier )?
                        if ( stream_showStmtIdentifier.hasNext() ) {
                            adaptor.addChild(root_1, stream_showStmtIdentifier.nextTree());

                        }
                        stream_showStmtIdentifier.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1334:7: KW_SHOW KW_TABLES ( ( KW_FROM | KW_IN ) db_name= identifier )? ( KW_LIKE showStmtIdentifier | showStmtIdentifier )?
                    {
                    KW_SHOW392=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement6659);  
                    stream_KW_SHOW.add(KW_SHOW392);


                    KW_TABLES393=(Token)match(input,KW_TABLES,FOLLOW_KW_TABLES_in_showStatement6661);  
                    stream_KW_TABLES.add(KW_TABLES393);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1334:25: ( ( KW_FROM | KW_IN ) db_name= identifier )?
                    int alt113=2;
                    switch ( input.LA(1) ) {
                        case KW_FROM:
                            {
                            alt113=1;
                            }
                            break;
                        case KW_IN:
                            {
                            switch ( input.LA(2) ) {
                                case Identifier:
                                case KW_ADD:
                                case KW_ADMIN:
                                case KW_AFTER:
                                case KW_ALL:
                                case KW_ALTER:
                                case KW_ANALYZE:
                                case KW_ARCHIVE:
                                case KW_ARRAY:
                                case KW_AS:
                                case KW_ASC:
                                case KW_AUTHORIZATION:
                                case KW_BEFORE:
                                case KW_BETWEEN:
                                case KW_BIGINT:
                                case KW_BINARY:
                                case KW_BOOLEAN:
                                case KW_BOTH:
                                case KW_BUCKET:
                                case KW_BUCKETS:
                                case KW_BY:
                                case KW_CASCADE:
                                case KW_CHANGE:
                                case KW_CLUSTER:
                                case KW_CLUSTERED:
                                case KW_CLUSTERSTATUS:
                                case KW_COLLECTION:
                                case KW_COLUMNS:
                                case KW_COMMENT:
                                case KW_COMPACT:
                                case KW_COMPACTIONS:
                                case KW_COMPUTE:
                                case KW_CONCATENATE:
                                case KW_CONTINUE:
                                case KW_CREATE:
                                case KW_CUBE:
                                case KW_CURSOR:
                                case KW_DATA:
                                case KW_DATABASES:
                                case KW_DATE:
                                case KW_DATETIME:
                                case KW_DBPROPERTIES:
                                case KW_DECIMAL:
                                case KW_DEFAULT:
                                case KW_DEFERRED:
                                case KW_DEFINED:
                                case KW_DELETE:
                                case KW_DELIMITED:
                                case KW_DEPENDENCY:
                                case KW_DESC:
                                case KW_DESCRIBE:
                                case KW_DIRECTORIES:
                                case KW_DIRECTORY:
                                case KW_DISABLE:
                                case KW_DISTRIBUTE:
                                case KW_DOUBLE:
                                case KW_DROP:
                                case KW_ELEM_TYPE:
                                case KW_ENABLE:
                                case KW_ESCAPED:
                                case KW_EXCLUSIVE:
                                case KW_EXISTS:
                                case KW_EXPLAIN:
                                case KW_EXPORT:
                                case KW_EXTERNAL:
                                case KW_FALSE:
                                case KW_FETCH:
                                case KW_FIELDS:
                                case KW_FILE:
                                case KW_FILEFORMAT:
                                case KW_FIRST:
                                case KW_FLOAT:
                                case KW_FOR:
                                case KW_FORMAT:
                                case KW_FORMATTED:
                                case KW_FULL:
                                case KW_FUNCTIONS:
                                case KW_GRANT:
                                case KW_GROUP:
                                case KW_GROUPING:
                                case KW_HOLD_DDLTIME:
                                case KW_IDXPROPERTIES:
                                case KW_IGNORE:
                                case KW_IMPORT:
                                case KW_IN:
                                case KW_INDEX:
                                case KW_INDEXES:
                                case KW_INNER:
                                case KW_INPATH:
                                case KW_INPUTDRIVER:
                                case KW_INPUTFORMAT:
                                case KW_INSERT:
                                case KW_INT:
                                case KW_INTERSECT:
                                case KW_INTO:
                                case KW_IS:
                                case KW_ITEMS:
                                case KW_JAR:
                                case KW_KEYS:
                                case KW_KEY_TYPE:
                                case KW_LATERAL:
                                case KW_LEFT:
                                case KW_LIKE:
                                case KW_LIMIT:
                                case KW_LINES:
                                case KW_LOAD:
                                case KW_LOCAL:
                                case KW_LOCATION:
                                case KW_LOCK:
                                case KW_LOCKS:
                                case KW_LOGICAL:
                                case KW_LONG:
                                case KW_MAPJOIN:
                                case KW_MATERIALIZED:
                                case KW_MINUS:
                                case KW_MSCK:
                                case KW_NONE:
                                case KW_NOSCAN:
                                case KW_NO_DROP:
                                case KW_NULL:
                                case KW_OF:
                                case KW_OFFLINE:
                                case KW_OPTION:
                                case KW_ORDER:
                                case KW_OUT:
                                case KW_OUTER:
                                case KW_OUTPUTDRIVER:
                                case KW_OUTPUTFORMAT:
                                case KW_OVERWRITE:
                                case KW_OWNER:
                                case KW_PARTITION:
                                case KW_PARTITIONED:
                                case KW_PARTITIONS:
                                case KW_PERCENT:
                                case KW_PLUS:
                                case KW_PRETTY:
                                case KW_PRINCIPALS:
                                case KW_PROCEDURE:
                                case KW_PROTECTION:
                                case KW_PURGE:
                                case KW_RANGE:
                                case KW_READ:
                                case KW_READONLY:
                                case KW_READS:
                                case KW_REBUILD:
                                case KW_RECORDREADER:
                                case KW_RECORDWRITER:
                                case KW_REGEXP:
                                case KW_RENAME:
                                case KW_REPAIR:
                                case KW_REPLACE:
                                case KW_RESTRICT:
                                case KW_REVOKE:
                                case KW_REWRITE:
                                case KW_RIGHT:
                                case KW_RLIKE:
                                case KW_ROLE:
                                case KW_ROLES:
                                case KW_ROLLUP:
                                case KW_ROW:
                                case KW_ROWS:
                                case KW_SCHEMA:
                                case KW_SCHEMAS:
                                case KW_SEMI:
                                case KW_SERDE:
                                case KW_SERDEPROPERTIES:
                                case KW_SET:
                                case KW_SETS:
                                case KW_SHARED:
                                case KW_SHOW:
                                case KW_SHOW_DATABASE:
                                case KW_SKEWED:
                                case KW_SMALLINT:
                                case KW_SORT:
                                case KW_SORTED:
                                case KW_SSL:
                                case KW_STATISTICS:
                                case KW_STORED:
                                case KW_STREAMTABLE:
                                case KW_STRING:
                                case KW_STRUCT:
                                case KW_TABLE:
                                case KW_TABLES:
                                case KW_TBLPROPERTIES:
                                case KW_TEMPORARY:
                                case KW_TERMINATED:
                                case KW_TIMESTAMP:
                                case KW_TINYINT:
                                case KW_TO:
                                case KW_TOUCH:
                                case KW_TRANSACTIONS:
                                case KW_TRIGGER:
                                case KW_TRUE:
                                case KW_TRUNCATE:
                                case KW_UNARCHIVE:
                                case KW_UNDO:
                                case KW_UNION:
                                case KW_UNIONTYPE:
                                case KW_UNLOCK:
                                case KW_UNSET:
                                case KW_UNSIGNED:
                                case KW_UPDATE:
                                case KW_USE:
                                case KW_USER:
                                case KW_USING:
                                case KW_UTC:
                                case KW_UTCTIMESTAMP:
                                case KW_VALUES:
                                case KW_VALUE_TYPE:
                                case KW_VIEW:
                                case KW_WHILE:
                                case KW_WITH:
                                    {
                                    alt113=1;
                                    }
                                    break;
                            }

                            }
                            break;
                    }

                    switch (alt113) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1334:26: ( KW_FROM | KW_IN ) db_name= identifier
                            {
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1334:26: ( KW_FROM | KW_IN )
                            int alt112=2;
                            switch ( input.LA(1) ) {
                            case KW_FROM:
                                {
                                alt112=1;
                                }
                                break;
                            case KW_IN:
                                {
                                alt112=2;
                                }
                                break;
                            default:
                                NoViableAltException nvae =
                                    new NoViableAltException("", 112, 0, input);

                                throw nvae;

                            }

                            switch (alt112) {
                                case 1 :
                                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1334:27: KW_FROM
                                    {
                                    KW_FROM394=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_showStatement6665);  
                                    stream_KW_FROM.add(KW_FROM394);


                                    }
                                    break;
                                case 2 :
                                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1334:35: KW_IN
                                    {
                                    KW_IN395=(Token)match(input,KW_IN,FOLLOW_KW_IN_in_showStatement6667);  
                                    stream_KW_IN.add(KW_IN395);


                                    }
                                    break;

                            }


                            pushFollow(FOLLOW_identifier_in_showStatement6672);
                            db_name=identifier();

                            state._fsp--;

                            stream_identifier.add(db_name.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1334:63: ( KW_LIKE showStmtIdentifier | showStmtIdentifier )?
                    int alt114=3;
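                    // The trailing pattern is ambiguous because LIKE is a non-reserved keyword: when LA(1)
                    // is KW_LIKE, LA(2) decides between "LIKE pattern" (alt 1) and a bare identifier
                    // spelled "like" at end of input (alt 2); any other identifier-like token or
                    // StringLiteral at LA(1) selects the bare showStmtIdentifier form directly.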
                    switch ( input.LA(1) ) {
                        case KW_LIKE:
                            {
                            switch ( input.LA(2) ) {
                                case Identifier:
                                case KW_ADD:
                                case KW_ADMIN:
                                case KW_AFTER:
                                case KW_ALL:
                                case KW_ALTER:
                                case KW_ANALYZE:
                                case KW_ARCHIVE:
                                case KW_ARRAY:
                                case KW_AS:
                                case KW_ASC:
                                case KW_AUTHORIZATION:
                                case KW_BEFORE:
                                case KW_BETWEEN:
                                case KW_BIGINT:
                                case KW_BINARY:
                                case KW_BOOLEAN:
                                case KW_BOTH:
                                case KW_BUCKET:
                                case KW_BUCKETS:
                                case KW_BY:
                                case KW_CASCADE:
                                case KW_CHANGE:
                                case KW_CLUSTER:
                                case KW_CLUSTERED:
                                case KW_CLUSTERSTATUS:
                                case KW_COLLECTION:
                                case KW_COLUMNS:
                                case KW_COMMENT:
                                case KW_COMPACT:
                                case KW_COMPACTIONS:
                                case KW_COMPUTE:
                                case KW_CONCATENATE:
                                case KW_CONTINUE:
                                case KW_CREATE:
                                case KW_CUBE:
                                case KW_CURSOR:
                                case KW_DATA:
                                case KW_DATABASES:
                                case KW_DATE:
                                case KW_DATETIME:
                                case KW_DBPROPERTIES:
                                case KW_DECIMAL:
                                case KW_DEFAULT:
                                case KW_DEFERRED:
                                case KW_DEFINED:
                                case KW_DELETE:
                                case KW_DELIMITED:
                                case KW_DEPENDENCY:
                                case KW_DESC:
                                case KW_DESCRIBE:
                                case KW_DIRECTORIES:
                                case KW_DIRECTORY:
                                case KW_DISABLE:
                                case KW_DISTRIBUTE:
                                case KW_DOUBLE:
                                case KW_DROP:
                                case KW_ELEM_TYPE:
                                case KW_ENABLE:
                                case KW_ESCAPED:
                                case KW_EXCLUSIVE:
                                case KW_EXISTS:
                                case KW_EXPLAIN:
                                case KW_EXPORT:
                                case KW_EXTERNAL:
                                case KW_FALSE:
                                case KW_FETCH:
                                case KW_FIELDS:
                                case KW_FILE:
                                case KW_FILEFORMAT:
                                case KW_FIRST:
                                case KW_FLOAT:
                                case KW_FOR:
                                case KW_FORMAT:
                                case KW_FORMATTED:
                                case KW_FULL:
                                case KW_FUNCTIONS:
                                case KW_GRANT:
                                case KW_GROUP:
                                case KW_GROUPING:
                                case KW_HOLD_DDLTIME:
                                case KW_IDXPROPERTIES:
                                case KW_IGNORE:
                                case KW_IMPORT:
                                case KW_IN:
                                case KW_INDEX:
                                case KW_INDEXES:
                                case KW_INNER:
                                case KW_INPATH:
                                case KW_INPUTDRIVER:
                                case KW_INPUTFORMAT:
                                case KW_INSERT:
                                case KW_INT:
                                case KW_INTERSECT:
                                case KW_INTO:
                                case KW_IS:
                                case KW_ITEMS:
                                case KW_JAR:
                                case KW_KEYS:
                                case KW_KEY_TYPE:
                                case KW_LATERAL:
                                case KW_LEFT:
                                case KW_LIKE:
                                case KW_LIMIT:
                                case KW_LINES:
                                case KW_LOAD:
                                case KW_LOCAL:
                                case KW_LOCATION:
                                case KW_LOCK:
                                case KW_LOCKS:
                                case KW_LOGICAL:
                                case KW_LONG:
                                case KW_MAPJOIN:
                                case KW_MATERIALIZED:
                                case KW_MINUS:
                                case KW_MSCK:
                                case KW_NONE:
                                case KW_NOSCAN:
                                case KW_NO_DROP:
                                case KW_NULL:
                                case KW_OF:
                                case KW_OFFLINE:
                                case KW_OPTION:
                                case KW_ORDER:
                                case KW_OUT:
                                case KW_OUTER:
                                case KW_OUTPUTDRIVER:
                                case KW_OUTPUTFORMAT:
                                case KW_OVERWRITE:
                                case KW_OWNER:
                                case KW_PARTITION:
                                case KW_PARTITIONED:
                                case KW_PARTITIONS:
                                case KW_PERCENT:
                                case KW_PLUS:
                                case KW_PRETTY:
                                case KW_PRINCIPALS:
                                case KW_PROCEDURE:
                                case KW_PROTECTION:
                                case KW_PURGE:
                                case KW_RANGE:
                                case KW_READ:
                                case KW_READONLY:
                                case KW_READS:
                                case KW_REBUILD:
                                case KW_RECORDREADER:
                                case KW_RECORDWRITER:
                                case KW_REGEXP:
                                case KW_RENAME:
                                case KW_REPAIR:
                                case KW_REPLACE:
                                case KW_RESTRICT:
                                case KW_REVOKE:
                                case KW_REWRITE:
                                case KW_RIGHT:
                                case KW_RLIKE:
                                case KW_ROLE:
                                case KW_ROLES:
                                case KW_ROLLUP:
                                case KW_ROW:
                                case KW_ROWS:
                                case KW_SCHEMA:
                                case KW_SCHEMAS:
                                case KW_SEMI:
                                case KW_SERDE:
                                case KW_SERDEPROPERTIES:
                                case KW_SET:
                                case KW_SETS:
                                case KW_SHARED:
                                case KW_SHOW:
                                case KW_SHOW_DATABASE:
                                case KW_SKEWED:
                                case KW_SMALLINT:
                                case KW_SORT:
                                case KW_SORTED:
                                case KW_SSL:
                                case KW_STATISTICS:
                                case KW_STORED:
                                case KW_STREAMTABLE:
                                case KW_STRING:
                                case KW_STRUCT:
                                case KW_TABLE:
                                case KW_TABLES:
                                case KW_TBLPROPERTIES:
                                case KW_TEMPORARY:
                                case KW_TERMINATED:
                                case KW_TIMESTAMP:
                                case KW_TINYINT:
                                case KW_TO:
                                case KW_TOUCH:
                                case KW_TRANSACTIONS:
                                case KW_TRIGGER:
                                case KW_TRUE:
                                case KW_TRUNCATE:
                                case KW_UNARCHIVE:
                                case KW_UNDO:
                                case KW_UNION:
                                case KW_UNIONTYPE:
                                case KW_UNLOCK:
                                case KW_UNSET:
                                case KW_UNSIGNED:
                                case KW_UPDATE:
                                case KW_USE:
                                case KW_USER:
                                case KW_USING:
                                case KW_UTC:
                                case KW_UTCTIMESTAMP:
                                case KW_VALUES:
                                case KW_VALUE_TYPE:
                                case KW_VIEW:
                                case KW_WHILE:
                                case KW_WITH:
                                case StringLiteral:
                                    {
                                    alt114=1;
                                    }
                                    break;
                                case EOF:
                                    {
                                    alt114=2;
                                    }
                                    break;
                            }

                            }
                            break;
                        case Identifier:
                        case KW_ADD:
                        case KW_ADMIN:
                        case KW_AFTER:
                        case KW_ALL:
                        case KW_ALTER:
                        case KW_ANALYZE:
                        case KW_ARCHIVE:
                        case KW_ARRAY:
                        case KW_AS:
                        case KW_ASC:
                        case KW_AUTHORIZATION:
                        case KW_BEFORE:
                        case KW_BETWEEN:
                        case KW_BIGINT:
                        case KW_BINARY:
                        case KW_BOOLEAN:
                        case KW_BOTH:
                        case KW_BUCKET:
                        case KW_BUCKETS:
                        case KW_BY:
                        case KW_CASCADE:
                        case KW_CHANGE:
                        case KW_CLUSTER:
                        case KW_CLUSTERED:
                        case KW_CLUSTERSTATUS:
                        case KW_COLLECTION:
                        case KW_COLUMNS:
                        case KW_COMMENT:
                        case KW_COMPACT:
                        case KW_COMPACTIONS:
                        case KW_COMPUTE:
                        case KW_CONCATENATE:
                        case KW_CONTINUE:
                        case KW_CREATE:
                        case KW_CUBE:
                        case KW_CURSOR:
                        case KW_DATA:
                        case KW_DATABASES:
                        case KW_DATE:
                        case KW_DATETIME:
                        case KW_DBPROPERTIES:
                        case KW_DECIMAL:
                        case KW_DEFAULT:
                        case KW_DEFERRED:
                        case KW_DEFINED:
                        case KW_DELETE:
                        case KW_DELIMITED:
                        case KW_DEPENDENCY:
                        case KW_DESC:
                        case KW_DESCRIBE:
                        case KW_DIRECTORIES:
                        case KW_DIRECTORY:
                        case KW_DISABLE:
                        case KW_DISTRIBUTE:
                        case KW_DOUBLE:
                        case KW_DROP:
                        case KW_ELEM_TYPE:
                        case KW_ENABLE:
                        case KW_ESCAPED:
                        case KW_EXCLUSIVE:
                        case KW_EXISTS:
                        case KW_EXPLAIN:
                        case KW_EXPORT:
                        case KW_EXTERNAL:
                        case KW_FALSE:
                        case KW_FETCH:
                        case KW_FIELDS:
                        case KW_FILE:
                        case KW_FILEFORMAT:
                        case KW_FIRST:
                        case KW_FLOAT:
                        case KW_FOR:
                        case KW_FORMAT:
                        case KW_FORMATTED:
                        case KW_FULL:
                        case KW_FUNCTIONS:
                        case KW_GRANT:
                        case KW_GROUP:
                        case KW_GROUPING:
                        case KW_HOLD_DDLTIME:
                        case KW_IDXPROPERTIES:
                        case KW_IGNORE:
                        case KW_IMPORT:
                        case KW_IN:
                        case KW_INDEX:
                        case KW_INDEXES:
                        case KW_INNER:
                        case KW_INPATH:
                        case KW_INPUTDRIVER:
                        case KW_INPUTFORMAT:
                        case KW_INSERT:
                        case KW_INT:
                        case KW_INTERSECT:
                        case KW_INTO:
                        case KW_IS:
                        case KW_ITEMS:
                        case KW_JAR:
                        case KW_KEYS:
                        case KW_KEY_TYPE:
                        case KW_LATERAL:
                        case KW_LEFT:
                        case KW_LIMIT:
                        case KW_LINES:
                        case KW_LOAD:
                        case KW_LOCAL:
                        case KW_LOCATION:
                        case KW_LOCK:
                        case KW_LOCKS:
                        case KW_LOGICAL:
                        case KW_LONG:
                        case KW_MAPJOIN:
                        case KW_MATERIALIZED:
                        case KW_MINUS:
                        case KW_MSCK:
                        case KW_NONE:
                        case KW_NOSCAN:
                        case KW_NO_DROP:
                        case KW_NULL:
                        case KW_OF:
                        case KW_OFFLINE:
                        case KW_OPTION:
                        case KW_ORDER:
                        case KW_OUT:
                        case KW_OUTER:
                        case KW_OUTPUTDRIVER:
                        case KW_OUTPUTFORMAT:
                        case KW_OVERWRITE:
                        case KW_OWNER:
                        case KW_PARTITION:
                        case KW_PARTITIONED:
                        case KW_PARTITIONS:
                        case KW_PERCENT:
                        case KW_PLUS:
                        case KW_PRETTY:
                        case KW_PRINCIPALS:
                        case KW_PROCEDURE:
                        case KW_PROTECTION:
                        case KW_PURGE:
                        case KW_RANGE:
                        case KW_READ:
                        case KW_READONLY:
                        case KW_READS:
                        case KW_REBUILD:
                        case KW_RECORDREADER:
                        case KW_RECORDWRITER:
                        case KW_REGEXP:
                        case KW_RENAME:
                        case KW_REPAIR:
                        case KW_REPLACE:
                        case KW_RESTRICT:
                        case KW_REVOKE:
                        case KW_REWRITE:
                        case KW_RIGHT:
                        case KW_RLIKE:
                        case KW_ROLE:
                        case KW_ROLES:
                        case KW_ROLLUP:
                        case KW_ROW:
                        case KW_ROWS:
                        case KW_SCHEMA:
                        case KW_SCHEMAS:
                        case KW_SEMI:
                        case KW_SERDE:
                        case KW_SERDEPROPERTIES:
                        case KW_SET:
                        case KW_SETS:
                        case KW_SHARED:
                        case KW_SHOW:
                        case KW_SHOW_DATABASE:
                        case KW_SKEWED:
                        case KW_SMALLINT:
                        case KW_SORT:
                        case KW_SORTED:
                        case KW_SSL:
                        case KW_STATISTICS:
                        case KW_STORED:
                        case KW_STREAMTABLE:
                        case KW_STRING:
                        case KW_STRUCT:
                        case KW_TABLE:
                        case KW_TABLES:
                        case KW_TBLPROPERTIES:
                        case KW_TEMPORARY:
                        case KW_TERMINATED:
                        case KW_TIMESTAMP:
                        case KW_TINYINT:
                        case KW_TO:
                        case KW_TOUCH:
                        case KW_TRANSACTIONS:
                        case KW_TRIGGER:
                        case KW_TRUE:
                        case KW_TRUNCATE:
                        case KW_UNARCHIVE:
                        case KW_UNDO:
                        case KW_UNION:
                        case KW_UNIONTYPE:
                        case KW_UNLOCK:
                        case KW_UNSET:
                        case KW_UNSIGNED:
                        case KW_UPDATE:
                        case KW_USE:
                        case KW_USER:
                        case KW_USING:
                        case KW_UTC:
                        case KW_UTCTIMESTAMP:
                        case KW_VALUES:
                        case KW_VALUE_TYPE:
                        case KW_VIEW:
                        case KW_WHILE:
                        case KW_WITH:
                        case StringLiteral:
                            {
                            alt114=2;
                            }
                            break;
                    }

                    switch (alt114) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1334:64: KW_LIKE showStmtIdentifier
                            {
                            KW_LIKE396=(Token)match(input,KW_LIKE,FOLLOW_KW_LIKE_in_showStatement6677);  
                            stream_KW_LIKE.add(KW_LIKE396);


                            pushFollow(FOLLOW_showStmtIdentifier_in_showStatement6679);
                            showStmtIdentifier397=showStmtIdentifier();

                            state._fsp--;

                            stream_showStmtIdentifier.add(showStmtIdentifier397.getTree());

                            }
                            break;
                        case 2 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1334:91: showStmtIdentifier
                            {
                            pushFollow(FOLLOW_showStmtIdentifier_in_showStatement6681);
                            showStmtIdentifier398=showStmtIdentifier();

                            state._fsp--;

                            stream_showStmtIdentifier.add(showStmtIdentifier398.getTree());

                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: showStmtIdentifier, db_name
                    // token labels: 
                    // rule labels: retval, db_name
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    RewriteRuleSubtreeStream stream_db_name=new RewriteRuleSubtreeStream(adaptor,"rule db_name",db_name!=null?db_name.tree:null);
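                    // The RewriteRuleSubtreeStream objects are ANTLR's rewrite buffers: subtrees queued
                    // while matching (db_name, showStmtIdentifier) are drained here to build the
                    // TOK_SHOWTABLES tree described by the "->" template in the comment below.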

                    root_0 = (CommonTree)adaptor.nil();
                    // 1334:113: -> ^( TOK_SHOWTABLES ( TOK_FROM $db_name)? ( showStmtIdentifier )? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1334:116: ^( TOK_SHOWTABLES ( TOK_FROM $db_name)? ( showStmtIdentifier )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                            (CommonTree)adaptor.create(TOK_SHOWTABLES, "TOK_SHOWTABLES"), root_1);

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1334:133: ( TOK_FROM $db_name)?
                        if ( stream_db_name.hasNext() ) {
                            adaptor.addChild(root_1,
                                (CommonTree)adaptor.create(TOK_FROM, "TOK_FROM"));

                            adaptor.addChild(root_1, stream_db_name.nextTree());

                        }
                        stream_db_name.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1334:154: ( showStmtIdentifier )?
                        if ( stream_showStmtIdentifier.hasNext() ) {
                            adaptor.addChild(root_1, stream_showStmtIdentifier.nextTree());

                        }
                        stream_showStmtIdentifier.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1335:7: KW_SHOW KW_COLUMNS ( KW_FROM | KW_IN ) tableName ( ( KW_FROM | KW_IN ) db_name= identifier )?
                    {
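                    // NOTE (hand-written annotation, not ANTLR output): this alternative matches
                    // "SHOW COLUMNS (FROM|IN) tableName [(FROM|IN) db_name]" -- an illustrative query
                    // would be "SHOW COLUMNS IN src FROM db1" -- and the rewrite below turns it into
                    // ^(TOK_SHOWCOLUMNS tableName ($db_name)?).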
                    KW_SHOW399=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement6709);  
                    stream_KW_SHOW.add(KW_SHOW399);


                    KW_COLUMNS400=(Token)match(input,KW_COLUMNS,FOLLOW_KW_COLUMNS_in_showStatement6711);  
                    stream_KW_COLUMNS.add(KW_COLUMNS400);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1335:26: ( KW_FROM | KW_IN )
                    int alt115=2;
                    switch ( input.LA(1) ) {
                    case KW_FROM:
                        {
                        alt115=1;
                        }
                        break;
                    case KW_IN:
                        {
                        alt115=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 115, 0, input);

                        throw nvae;

                    }

                    switch (alt115) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1335:27: KW_FROM
                            {
                            KW_FROM401=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_showStatement6714);  
                            stream_KW_FROM.add(KW_FROM401);


                            }
                            break;
                        case 2 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1335:35: KW_IN
                            {
                            KW_IN402=(Token)match(input,KW_IN,FOLLOW_KW_IN_in_showStatement6716);  
                            stream_KW_IN.add(KW_IN402);


                            }
                            break;

                    }


                    pushFollow(FOLLOW_tableName_in_showStatement6719);
                    tableName403=tableName();

                    state._fsp--;

                    stream_tableName.add(tableName403.getTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1335:52: ( ( KW_FROM | KW_IN ) db_name= identifier )?
                    int alt117=2;
                    switch ( input.LA(1) ) {
                        case KW_FROM:
                        case KW_IN:
                            {
                            alt117=1;
                            }
                            break;
                    }

                    switch (alt117) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1335:53: ( KW_FROM | KW_IN ) db_name= identifier
                            {
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1335:53: ( KW_FROM | KW_IN )
                            int alt116=2;
                            switch ( input.LA(1) ) {
                            case KW_FROM:
                                {
                                alt116=1;
                                }
                                break;
                            case KW_IN:
                                {
                                alt116=2;
                                }
                                break;
                            default:
                                NoViableAltException nvae =
                                    new NoViableAltException("", 116, 0, input);

                                throw nvae;

                            }

                            switch (alt116) {
                                case 1 :
                                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1335:54: KW_FROM
                                    {
                                    KW_FROM404=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_showStatement6723);  
                                    stream_KW_FROM.add(KW_FROM404);


                                    }
                                    break;
                                case 2 :
                                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1335:62: KW_IN
                                    {
                                    KW_IN405=(Token)match(input,KW_IN,FOLLOW_KW_IN_in_showStatement6725);  
                                    stream_KW_IN.add(KW_IN405);


                                    }
                                    break;

                            }


                            pushFollow(FOLLOW_identifier_in_showStatement6730);
                            db_name=identifier();

                            state._fsp--;

                            stream_identifier.add(db_name.getTree());

                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: db_name, tableName
                    // token labels: 
                    // rule labels: retval, db_name
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    RewriteRuleSubtreeStream stream_db_name=new RewriteRuleSubtreeStream(adaptor,"rule db_name",db_name!=null?db_name.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1336:5: -> ^( TOK_SHOWCOLUMNS tableName ( $db_name)? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1336:8: ^( TOK_SHOWCOLUMNS tableName ( $db_name)? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                            (CommonTree)adaptor.create(TOK_SHOWCOLUMNS, "TOK_SHOWCOLUMNS"), root_1);

                        adaptor.addChild(root_1, stream_tableName.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1336:37: ( $db_name)?
                        if ( stream_db_name.hasNext() ) {
                            adaptor.addChild(root_1, stream_db_name.nextTree());

                        }
                        stream_db_name.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 4 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1337:7: KW_SHOW KW_FUNCTIONS ( showFunctionIdentifier )?
                    {
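                    // NOTE (hand-written annotation, not ANTLR output): this alternative matches
                    // "SHOW FUNCTIONS [showFunctionIdentifier]", where the optional identifier may be a
                    // function name or a quoted pattern (StringLiteral), and the rewrite below turns it
                    // into ^(TOK_SHOWFUNCTIONS (showFunctionIdentifier)?).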
                    KW_SHOW406=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement6756);  
                    stream_KW_SHOW.add(KW_SHOW406);


                    KW_FUNCTIONS407=(Token)match(input,KW_FUNCTIONS,FOLLOW_KW_FUNCTIONS_in_showStatement6758);  
                    stream_KW_FUNCTIONS.add(KW_FUNCTIONS407);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1337:28: ( showFunctionIdentifier )?
                    int alt118=2;
                    switch ( input.LA(1) ) {
                        case Identifier:
                        case KW_ADD:
                        case KW_ADMIN:
                        case KW_AFTER:
                        case KW_ALL:
                        case KW_ALTER:
                        case KW_ANALYZE:
                        case KW_ARCHIVE:
                        case KW_ARRAY:
                        case KW_AS:
                        case KW_ASC:
                        case KW_AUTHORIZATION:
                        case KW_BEFORE:
                        case KW_BETWEEN:
                        case KW_BIGINT:
                        case KW_BINARY:
                        case KW_BOOLEAN:
                        case KW_BOTH:
                        case KW_BUCKET:
                        case KW_BUCKETS:
                        case KW_BY:
                        case KW_CASCADE:
                        case KW_CHANGE:
                        case KW_CLUSTER:
                        case KW_CLUSTERED:
                        case KW_CLUSTERSTATUS:
                        case KW_COLLECTION:
                        case KW_COLUMNS:
                        case KW_COMMENT:
                        case KW_COMPACT:
                        case KW_COMPACTIONS:
                        case KW_COMPUTE:
                        case KW_CONCATENATE:
                        case KW_CONTINUE:
                        case KW_CREATE:
                        case KW_CUBE:
                        case KW_CURSOR:
                        case KW_DATA:
                        case KW_DATABASES:
                        case KW_DATE:
                        case KW_DATETIME:
                        case KW_DBPROPERTIES:
                        case KW_DECIMAL:
                        case KW_DEFAULT:
                        case KW_DEFERRED:
                        case KW_DEFINED:
                        case KW_DELETE:
                        case KW_DELIMITED:
                        case KW_DEPENDENCY:
                        case KW_DESC:
                        case KW_DESCRIBE:
                        case KW_DIRECTORIES:
                        case KW_DIRECTORY:
                        case KW_DISABLE:
                        case KW_DISTRIBUTE:
                        case KW_DOUBLE:
                        case KW_DROP:
                        case KW_ELEM_TYPE:
                        case KW_ENABLE:
                        case KW_ESCAPED:
                        case KW_EXCLUSIVE:
                        case KW_EXISTS:
                        case KW_EXPLAIN:
                        case KW_EXPORT:
                        case KW_EXTERNAL:
                        case KW_FALSE:
                        case KW_FETCH:
                        case KW_FIELDS:
                        case KW_FILE:
                        case KW_FILEFORMAT:
                        case KW_FIRST:
                        case KW_FLOAT:
                        case KW_FOR:
                        case KW_FORMAT:
                        case KW_FORMATTED:
                        case KW_FULL:
                        case KW_FUNCTIONS:
                        case KW_GRANT:
                        case KW_GROUP:
                        case KW_GROUPING:
                        case KW_HOLD_DDLTIME:
                        case KW_IDXPROPERTIES:
                        case KW_IGNORE:
                        case KW_IMPORT:
                        case KW_IN:
                        case KW_INDEX:
                        case KW_INDEXES:
                        case KW_INNER:
                        case KW_INPATH:
                        case KW_INPUTDRIVER:
                        case KW_INPUTFORMAT:
                        case KW_INSERT:
                        case KW_INT:
                        case KW_INTERSECT:
                        case KW_INTO:
                        case KW_IS:
                        case KW_ITEMS:
                        case KW_JAR:
                        case KW_KEYS:
                        case KW_KEY_TYPE:
                        case KW_LATERAL:
                        case KW_LEFT:
                        case KW_LIKE:
                        case KW_LIMIT:
                        case KW_LINES:
                        case KW_LOAD:
                        case KW_LOCAL:
                        case KW_LOCATION:
                        case KW_LOCK:
                        case KW_LOCKS:
                        case KW_LOGICAL:
                        case KW_LONG:
                        case KW_MAPJOIN:
                        case KW_MATERIALIZED:
                        case KW_MINUS:
                        case KW_MSCK:
                        case KW_NONE:
                        case KW_NOSCAN:
                        case KW_NO_DROP:
                        case KW_NULL:
                        case KW_OF:
                        case KW_OFFLINE:
                        case KW_OPTION:
                        case KW_ORDER:
                        case KW_OUT:
                        case KW_OUTER:
                        case KW_OUTPUTDRIVER:
                        case KW_OUTPUTFORMAT:
                        case KW_OVERWRITE:
                        case KW_OWNER:
                        case KW_PARTITION:
                        case KW_PARTITIONED:
                        case KW_PARTITIONS:
                        case KW_PERCENT:
                        case KW_PLUS:
                        case KW_PRETTY:
                        case KW_PRINCIPALS:
                        case KW_PROCEDURE:
                        case KW_PROTECTION:
                        case KW_PURGE:
                        case KW_RANGE:
                        case KW_READ:
                        case KW_READONLY:
                        case KW_READS:
                        case KW_REBUILD:
                        case KW_RECORDREADER:
                        case KW_RECORDWRITER:
                        case KW_REGEXP:
                        case KW_RENAME:
                        case KW_REPAIR:
                        case KW_REPLACE:
                        case KW_RESTRICT:
                        case KW_REVOKE:
                        case KW_REWRITE:
                        case KW_RIGHT:
                        case KW_RLIKE:
                        case KW_ROLE:
                        case KW_ROLES:
                        case KW_ROLLUP:
                        case KW_ROW:
                        case KW_ROWS:
                        case KW_SCHEMA:
                        case KW_SCHEMAS:
                        case KW_SEMI:
                        case KW_SERDE:
                        case KW_SERDEPROPERTIES:
                        case KW_SET:
                        case KW_SETS:
                        case KW_SHARED:
                        case KW_SHOW:
                        case KW_SHOW_DATABASE:
                        case KW_SKEWED:
                        case KW_SMALLINT:
                        case KW_SORT:
                        case KW_SORTED:
                        case KW_SSL:
                        case KW_STATISTICS:
                        case KW_STORED:
                        case KW_STREAMTABLE:
                        case KW_STRING:
                        case KW_STRUCT:
                        case KW_TABLE:
                        case KW_TABLES:
                        case KW_TBLPROPERTIES:
                        case KW_TEMPORARY:
                        case KW_TERMINATED:
                        case KW_TIMESTAMP:
                        case KW_TINYINT:
                        case KW_TO:
                        case KW_TOUCH:
                        case KW_TRANSACTIONS:
                        case KW_TRIGGER:
                        case KW_TRUE:
                        case KW_TRUNCATE:
                        case KW_UNARCHIVE:
                        case KW_UNDO:
                        case KW_UNION:
                        case KW_UNIONTYPE:
                        case KW_UNLOCK:
                        case KW_UNSET:
                        case KW_UNSIGNED:
                        case KW_UPDATE:
                        case KW_USE:
                        case KW_USER:
                        case KW_USING:
                        case KW_UTC:
                        case KW_UTCTIMESTAMP:
                        case KW_VALUES:
                        case KW_VALUE_TYPE:
                        case KW_VIEW:
                        case KW_WHILE:
                        case KW_WITH:
                        case StringLiteral:
                            {
                            alt118=1;
                            }
                            break;
                    }

                    switch (alt118) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1337:28: showFunctionIdentifier
                            {
                            pushFollow(FOLLOW_showFunctionIdentifier_in_showStatement6760);
                            showFunctionIdentifier408=showFunctionIdentifier();

                            state._fsp--;

                            stream_showFunctionIdentifier.add(showFunctionIdentifier408.getTree());

                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: showFunctionIdentifier
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1337:53: -> ^( TOK_SHOWFUNCTIONS ( showFunctionIdentifier )? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1337:56: ^( TOK_SHOWFUNCTIONS ( showFunctionIdentifier )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                            (CommonTree)adaptor.create(TOK_SHOWFUNCTIONS, "TOK_SHOWFUNCTIONS"), root_1);

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1337:76: ( showFunctionIdentifier )?
                        if ( stream_showFunctionIdentifier.hasNext() ) {
                            adaptor.addChild(root_1, stream_showFunctionIdentifier.nextTree());

                        }
                        stream_showFunctionIdentifier.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 5 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1338:7: KW_SHOW KW_PARTITIONS tabName= tableName ( partitionSpec )?
                    {
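                    // NOTE (hand-written annotation, not ANTLR output): this alternative matches
                    // "SHOW PARTITIONS tableName [partitionSpec]", the optional spec being predicted by a
                    // leading PARTITION keyword -- an illustrative query, assuming the usual partitionSpec
                    // shape, is "SHOW PARTITIONS src PARTITION (ds='2008-04-08')" -- and the rewrite below
                    // turns it into ^(TOK_SHOWPARTITIONS $tabName (partitionSpec)?).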
                    KW_SHOW409=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement6779);  
                    stream_KW_SHOW.add(KW_SHOW409);


                    KW_PARTITIONS410=(Token)match(input,KW_PARTITIONS,FOLLOW_KW_PARTITIONS_in_showStatement6781);  
                    stream_KW_PARTITIONS.add(KW_PARTITIONS410);


                    pushFollow(FOLLOW_tableName_in_showStatement6785);
                    tabName=tableName();

                    state._fsp--;

                    stream_tableName.add(tabName.getTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1338:47: ( partitionSpec )?
                    int alt119=2;
                    switch ( input.LA(1) ) {
                        case KW_PARTITION:
                            {
                            alt119=1;
                            }
                            break;
                    }

                    switch (alt119) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1338:47: partitionSpec
                            {
                            pushFollow(FOLLOW_partitionSpec_in_showStatement6787);
                            partitionSpec411=partitionSpec();

                            state._fsp--;

                            stream_partitionSpec.add(partitionSpec411.getTree());

                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: partitionSpec, tabName
                    // token labels: 
                    // rule labels: retval, tabName
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    RewriteRuleSubtreeStream stream_tabName=new RewriteRuleSubtreeStream(adaptor,"rule tabName",tabName!=null?tabName.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1338:62: -> ^( TOK_SHOWPARTITIONS $tabName ( partitionSpec )? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1338:65: ^( TOK_SHOWPARTITIONS $tabName ( partitionSpec )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                            (CommonTree)adaptor.create(TOK_SHOWPARTITIONS, "TOK_SHOWPARTITIONS"), root_1);

                        adaptor.addChild(root_1, stream_tabName.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1338:95: ( partitionSpec )?
                        if ( stream_partitionSpec.hasNext() ) {
                            adaptor.addChild(root_1, stream_partitionSpec.nextTree());

                        }
                        stream_partitionSpec.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 6 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1339:7: KW_SHOW KW_CREATE KW_TABLE tabName= tableName
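                    // Example (illustrative only; the table name is hypothetical):
                    //   SHOW CREATE TABLE page_view
                    // matches this alternative and is rewritten below to ^(TOK_SHOW_CREATETABLE page_view).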
                    {
                    KW_SHOW412=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement6809);  
                    stream_KW_SHOW.add(KW_SHOW412);


                    KW_CREATE413=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_showStatement6811);  
                    stream_KW_CREATE.add(KW_CREATE413);


                    KW_TABLE414=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_showStatement6813);  
                    stream_KW_TABLE.add(KW_TABLE414);


                    pushFollow(FOLLOW_tableName_in_showStatement6817);
                    tabName=tableName();

                    state._fsp--;

                    stream_tableName.add(tabName.getTree());

                    // AST REWRITE
                    // elements: tabName
                    // token labels: 
                    // rule labels: retval, tabName
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    RewriteRuleSubtreeStream stream_tabName=new RewriteRuleSubtreeStream(adaptor,"rule tabName",tabName!=null?tabName.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1339:52: -> ^( TOK_SHOW_CREATETABLE $tabName)
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1339:55: ^( TOK_SHOW_CREATETABLE $tabName)
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_SHOW_CREATETABLE, "TOK_SHOW_CREATETABLE")
                        , root_1);

                        adaptor.addChild(root_1, stream_tabName.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 7 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1340:7: KW_SHOW KW_TABLE KW_EXTENDED ( ( KW_FROM | KW_IN ) db_name= identifier )? KW_LIKE showStmtIdentifier ( partitionSpec )?
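                    // Example (illustrative only; identifiers are hypothetical):
                    //   SHOW TABLE EXTENDED IN db1 LIKE page_view PARTITION (ds='2015-05-14')
                    // matches this alternative; the optional FROM/IN database and PARTITION clause
                    // become optional children of the TOK_SHOW_TABLESTATUS node built below.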
                    {
                    KW_SHOW415=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement6834);  
                    stream_KW_SHOW.add(KW_SHOW415);


                    KW_TABLE416=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_showStatement6836);  
                    stream_KW_TABLE.add(KW_TABLE416);


                    KW_EXTENDED417=(Token)match(input,KW_EXTENDED,FOLLOW_KW_EXTENDED_in_showStatement6838);  
                    stream_KW_EXTENDED.add(KW_EXTENDED417);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1340:36: ( ( KW_FROM | KW_IN ) db_name= identifier )?
                    int alt121=2;
                    switch ( input.LA(1) ) {
                        case KW_FROM:
                        case KW_IN:
                            {
                            alt121=1;
                            }
                            break;
                    }

                    switch (alt121) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1340:37: ( KW_FROM | KW_IN ) db_name= identifier
                            {
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1340:37: ( KW_FROM | KW_IN )
                            int alt120=2;
                            switch ( input.LA(1) ) {
                            case KW_FROM:
                                {
                                alt120=1;
                                }
                                break;
                            case KW_IN:
                                {
                                alt120=2;
                                }
                                break;
                            default:
                                NoViableAltException nvae =
                                    new NoViableAltException("", 120, 0, input);

                                throw nvae;

                            }

                            switch (alt120) {
                                case 1 :
                                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1340:38: KW_FROM
                                    {
                                    KW_FROM418=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_showStatement6842);  
                                    stream_KW_FROM.add(KW_FROM418);


                                    }
                                    break;
                                case 2 :
                                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1340:46: KW_IN
                                    {
                                    KW_IN419=(Token)match(input,KW_IN,FOLLOW_KW_IN_in_showStatement6844);  
                                    stream_KW_IN.add(KW_IN419);


                                    }
                                    break;

                            }


                            pushFollow(FOLLOW_identifier_in_showStatement6849);
                            db_name=identifier();

                            state._fsp--;

                            stream_identifier.add(db_name.getTree());

                            }
                            break;

                    }


                    KW_LIKE420=(Token)match(input,KW_LIKE,FOLLOW_KW_LIKE_in_showStatement6853);  
                    stream_KW_LIKE.add(KW_LIKE420);


                    pushFollow(FOLLOW_showStmtIdentifier_in_showStatement6855);
                    showStmtIdentifier421=showStmtIdentifier();

                    state._fsp--;

                    stream_showStmtIdentifier.add(showStmtIdentifier421.getTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1340:101: ( partitionSpec )?
                    int alt122=2;
                    switch ( input.LA(1) ) {
                        case KW_PARTITION:
                            {
                            alt122=1;
                            }
                            break;
                    }

                    switch (alt122) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1340:101: partitionSpec
                            {
                            pushFollow(FOLLOW_partitionSpec_in_showStatement6857);
                            partitionSpec422=partitionSpec();

                            state._fsp--;

                            stream_partitionSpec.add(partitionSpec422.getTree());

                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: partitionSpec, showStmtIdentifier, db_name
                    // token labels: 
                    // rule labels: retval, db_name
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    RewriteRuleSubtreeStream stream_db_name=new RewriteRuleSubtreeStream(adaptor,"rule db_name",db_name!=null?db_name.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1341:5: -> ^( TOK_SHOW_TABLESTATUS showStmtIdentifier ( $db_name)? ( partitionSpec )? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1341:8: ^( TOK_SHOW_TABLESTATUS showStmtIdentifier ( $db_name)? ( partitionSpec )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_SHOW_TABLESTATUS, "TOK_SHOW_TABLESTATUS")
                        , root_1);

                        adaptor.addChild(root_1, stream_showStmtIdentifier.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1341:51: ( $db_name)?
                        if ( stream_db_name.hasNext() ) {
                            adaptor.addChild(root_1, stream_db_name.nextTree());

                        }
                        stream_db_name.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1341:60: ( partitionSpec )?
                        if ( stream_partitionSpec.hasNext() ) {
                            adaptor.addChild(root_1, stream_partitionSpec.nextTree());

                        }
                        stream_partitionSpec.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 8 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1342:7: KW_SHOW KW_TBLPROPERTIES tableName ( LPAREN prptyName= StringLiteral RPAREN )?
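                    // Example (illustrative only; identifiers are hypothetical):
                    //   SHOW TBLPROPERTIES page_view ('last_modified_by')
                    // matches this alternative; the parenthesised property name is optional and,
                    // when present, becomes a child of the TOK_SHOW_TBLPROPERTIES node built below.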
                    {
                    KW_SHOW423=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement6885);  
                    stream_KW_SHOW.add(KW_SHOW423);


                    KW_TBLPROPERTIES424=(Token)match(input,KW_TBLPROPERTIES,FOLLOW_KW_TBLPROPERTIES_in_showStatement6887);  
                    stream_KW_TBLPROPERTIES.add(KW_TBLPROPERTIES424);


                    pushFollow(FOLLOW_tableName_in_showStatement6889);
                    tableName425=tableName();

                    state._fsp--;

                    stream_tableName.add(tableName425.getTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1342:42: ( LPAREN prptyName= StringLiteral RPAREN )?
                    int alt123=2;
                    switch ( input.LA(1) ) {
                        case LPAREN:
                            {
                            alt123=1;
                            }
                            break;
                    }

                    switch (alt123) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1342:43: LPAREN prptyName= StringLiteral RPAREN
                            {
                            LPAREN426=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_showStatement6892);  
                            stream_LPAREN.add(LPAREN426);


                            prptyName=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_showStatement6896);  
                            stream_StringLiteral.add(prptyName);


                            RPAREN427=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_showStatement6898);  
                            stream_RPAREN.add(RPAREN427);


                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: tableName, prptyName
                    // token labels: prptyName
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleTokenStream stream_prptyName=new RewriteRuleTokenStream(adaptor,"token prptyName",prptyName);
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1342:83: -> ^( TOK_SHOW_TBLPROPERTIES tableName ( $prptyName)? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1342:86: ^( TOK_SHOW_TBLPROPERTIES tableName ( $prptyName)? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_SHOW_TBLPROPERTIES, "TOK_SHOW_TBLPROPERTIES")
                        , root_1);

                        adaptor.addChild(root_1, stream_tableName.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1342:122: ( $prptyName)?
                        if ( stream_prptyName.hasNext() ) {
                            adaptor.addChild(root_1, stream_prptyName.nextNode());

                        }
                        stream_prptyName.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 9 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1343:7: KW_SHOW KW_LOCKS (parttype= partTypeExpr )? (isExtended= KW_EXTENDED )?
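                    // Example (illustrative only; the table name is hypothetical):
                    //   SHOW LOCKS page_view EXTENDED
                    // matches this alternative; both the table/partition expression and the
                    // EXTENDED keyword are optional children of the TOK_SHOWLOCKS node built below.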
                    {
                    KW_SHOW428=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement6920);  
                    stream_KW_SHOW.add(KW_SHOW428);


                    KW_LOCKS429=(Token)match(input,KW_LOCKS,FOLLOW_KW_LOCKS_in_showStatement6922);  
                    stream_KW_LOCKS.add(KW_LOCKS429);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1343:24: (parttype= partTypeExpr )?
                    int alt124=2;
                    switch ( input.LA(1) ) {
                        case Identifier:
                        case KW_ADD:
                        case KW_ADMIN:
                        case KW_AFTER:
                        case KW_ALL:
                        case KW_ALTER:
                        case KW_ANALYZE:
                        case KW_ARCHIVE:
                        case KW_ARRAY:
                        case KW_AS:
                        case KW_ASC:
                        case KW_AUTHORIZATION:
                        case KW_BEFORE:
                        case KW_BETWEEN:
                        case KW_BIGINT:
                        case KW_BINARY:
                        case KW_BOOLEAN:
                        case KW_BOTH:
                        case KW_BUCKET:
                        case KW_BUCKETS:
                        case KW_BY:
                        case KW_CASCADE:
                        case KW_CHANGE:
                        case KW_CLUSTER:
                        case KW_CLUSTERED:
                        case KW_CLUSTERSTATUS:
                        case KW_COLLECTION:
                        case KW_COLUMNS:
                        case KW_COMMENT:
                        case KW_COMPACT:
                        case KW_COMPACTIONS:
                        case KW_COMPUTE:
                        case KW_CONCATENATE:
                        case KW_CONTINUE:
                        case KW_CREATE:
                        case KW_CUBE:
                        case KW_CURSOR:
                        case KW_DATA:
                        case KW_DATABASES:
                        case KW_DATE:
                        case KW_DATETIME:
                        case KW_DBPROPERTIES:
                        case KW_DECIMAL:
                        case KW_DEFAULT:
                        case KW_DEFERRED:
                        case KW_DEFINED:
                        case KW_DELETE:
                        case KW_DELIMITED:
                        case KW_DEPENDENCY:
                        case KW_DESC:
                        case KW_DESCRIBE:
                        case KW_DIRECTORIES:
                        case KW_DIRECTORY:
                        case KW_DISABLE:
                        case KW_DISTRIBUTE:
                        case KW_DOUBLE:
                        case KW_DROP:
                        case KW_ELEM_TYPE:
                        case KW_ENABLE:
                        case KW_ESCAPED:
                        case KW_EXCLUSIVE:
                        case KW_EXISTS:
                        case KW_EXPLAIN:
                        case KW_EXPORT:
                        case KW_EXTERNAL:
                        case KW_FALSE:
                        case KW_FETCH:
                        case KW_FIELDS:
                        case KW_FILE:
                        case KW_FILEFORMAT:
                        case KW_FIRST:
                        case KW_FLOAT:
                        case KW_FOR:
                        case KW_FORMAT:
                        case KW_FORMATTED:
                        case KW_FULL:
                        case KW_FUNCTIONS:
                        case KW_GRANT:
                        case KW_GROUP:
                        case KW_GROUPING:
                        case KW_HOLD_DDLTIME:
                        case KW_IDXPROPERTIES:
                        case KW_IGNORE:
                        case KW_IMPORT:
                        case KW_IN:
                        case KW_INDEX:
                        case KW_INDEXES:
                        case KW_INNER:
                        case KW_INPATH:
                        case KW_INPUTDRIVER:
                        case KW_INPUTFORMAT:
                        case KW_INSERT:
                        case KW_INT:
                        case KW_INTERSECT:
                        case KW_INTO:
                        case KW_IS:
                        case KW_ITEMS:
                        case KW_JAR:
                        case KW_KEYS:
                        case KW_KEY_TYPE:
                        case KW_LATERAL:
                        case KW_LEFT:
                        case KW_LIKE:
                        case KW_LIMIT:
                        case KW_LINES:
                        case KW_LOAD:
                        case KW_LOCAL:
                        case KW_LOCATION:
                        case KW_LOCK:
                        case KW_LOCKS:
                        case KW_LOGICAL:
                        case KW_LONG:
                        case KW_MAPJOIN:
                        case KW_MATERIALIZED:
                        case KW_MINUS:
                        case KW_MSCK:
                        case KW_NONE:
                        case KW_NOSCAN:
                        case KW_NO_DROP:
                        case KW_NULL:
                        case KW_OF:
                        case KW_OFFLINE:
                        case KW_OPTION:
                        case KW_ORDER:
                        case KW_OUT:
                        case KW_OUTER:
                        case KW_OUTPUTDRIVER:
                        case KW_OUTPUTFORMAT:
                        case KW_OVERWRITE:
                        case KW_OWNER:
                        case KW_PARTITION:
                        case KW_PARTITIONED:
                        case KW_PARTITIONS:
                        case KW_PERCENT:
                        case KW_PLUS:
                        case KW_PRETTY:
                        case KW_PRINCIPALS:
                        case KW_PROCEDURE:
                        case KW_PROTECTION:
                        case KW_PURGE:
                        case KW_RANGE:
                        case KW_READ:
                        case KW_READONLY:
                        case KW_READS:
                        case KW_REBUILD:
                        case KW_RECORDREADER:
                        case KW_RECORDWRITER:
                        case KW_REGEXP:
                        case KW_RENAME:
                        case KW_REPAIR:
                        case KW_REPLACE:
                        case KW_RESTRICT:
                        case KW_REVOKE:
                        case KW_REWRITE:
                        case KW_RIGHT:
                        case KW_RLIKE:
                        case KW_ROLE:
                        case KW_ROLES:
                        case KW_ROLLUP:
                        case KW_ROW:
                        case KW_ROWS:
                        case KW_SCHEMA:
                        case KW_SCHEMAS:
                        case KW_SEMI:
                        case KW_SERDE:
                        case KW_SERDEPROPERTIES:
                        case KW_SET:
                        case KW_SETS:
                        case KW_SHARED:
                        case KW_SHOW:
                        case KW_SHOW_DATABASE:
                        case KW_SKEWED:
                        case KW_SMALLINT:
                        case KW_SORT:
                        case KW_SORTED:
                        case KW_SSL:
                        case KW_STATISTICS:
                        case KW_STORED:
                        case KW_STREAMTABLE:
                        case KW_STRING:
                        case KW_STRUCT:
                        case KW_TABLE:
                        case KW_TABLES:
                        case KW_TBLPROPERTIES:
                        case KW_TEMPORARY:
                        case KW_TERMINATED:
                        case KW_TIMESTAMP:
                        case KW_TINYINT:
                        case KW_TO:
                        case KW_TOUCH:
                        case KW_TRANSACTIONS:
                        case KW_TRIGGER:
                        case KW_TRUE:
                        case KW_TRUNCATE:
                        case KW_UNARCHIVE:
                        case KW_UNDO:
                        case KW_UNION:
                        case KW_UNIONTYPE:
                        case KW_UNLOCK:
                        case KW_UNSET:
                        case KW_UNSIGNED:
                        case KW_UPDATE:
                        case KW_USE:
                        case KW_USER:
                        case KW_USING:
                        case KW_UTC:
                        case KW_UTCTIMESTAMP:
                        case KW_VALUES:
                        case KW_VALUE_TYPE:
                        case KW_VIEW:
                        case KW_WHILE:
                        case KW_WITH:
                            {
                            alt124=1;
                            }
                            break;
                    }

                    switch (alt124) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1343:25: parttype= partTypeExpr
                            {
                            pushFollow(FOLLOW_partTypeExpr_in_showStatement6927);
                            parttype=partTypeExpr();

                            state._fsp--;

                            stream_partTypeExpr.add(parttype.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1343:49: (isExtended= KW_EXTENDED )?
                    int alt125=2;
                    switch ( input.LA(1) ) {
                        case KW_EXTENDED:
                            {
                            alt125=1;
                            }
                            break;
                    }

                    switch (alt125) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1343:50: isExtended= KW_EXTENDED
                            {
                            isExtended=(Token)match(input,KW_EXTENDED,FOLLOW_KW_EXTENDED_in_showStatement6934);  
                            stream_KW_EXTENDED.add(isExtended);


                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: isExtended, parttype
                    // token labels: isExtended
                    // rule labels: retval, parttype
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleTokenStream stream_isExtended=new RewriteRuleTokenStream(adaptor,"token isExtended",isExtended);
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    RewriteRuleSubtreeStream stream_parttype=new RewriteRuleSubtreeStream(adaptor,"rule parttype",parttype!=null?parttype.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1343:75: -> ^( TOK_SHOWLOCKS ( $parttype)? ( $isExtended)? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1343:78: ^( TOK_SHOWLOCKS ( $parttype)? ( $isExtended)? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_SHOWLOCKS, "TOK_SHOWLOCKS")
                        , root_1);

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1343:95: ( $parttype)?
                        if ( stream_parttype.hasNext() ) {
                            adaptor.addChild(root_1, stream_parttype.nextTree());

                        }
                        stream_parttype.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1343:106: ( $isExtended)?
                        if ( stream_isExtended.hasNext() ) {
                            adaptor.addChild(root_1, stream_isExtended.nextNode());

                        }
                        stream_isExtended.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 10 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1344:7: KW_SHOW KW_LOCKS ( KW_DATABASE | KW_SCHEMA ) (dbName= Identifier ) (isExtended= KW_EXTENDED )?
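                    // Example (illustrative only; the database name is hypothetical):
                    //   SHOW LOCKS DATABASE db1 EXTENDED
                    // matches this alternative and is rewritten below to ^(TOK_SHOWDBLOCKS db1 EXTENDED),
                    // with the EXTENDED child present only when the keyword was given.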
                    {
                    KW_SHOW430=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement6958);  
                    stream_KW_SHOW.add(KW_SHOW430);


                    KW_LOCKS431=(Token)match(input,KW_LOCKS,FOLLOW_KW_LOCKS_in_showStatement6960);  
                    stream_KW_LOCKS.add(KW_LOCKS431);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1344:24: ( KW_DATABASE | KW_SCHEMA )
                    int alt126=2;
                    switch ( input.LA(1) ) {
                    case KW_DATABASE:
                        {
                        alt126=1;
                        }
                        break;
                    case KW_SCHEMA:
                        {
                        alt126=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 126, 0, input);

                        throw nvae;

                    }

                    switch (alt126) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1344:25: KW_DATABASE
                            {
                            KW_DATABASE432=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_showStatement6963);  
                            stream_KW_DATABASE.add(KW_DATABASE432);


                            }
                            break;
                        case 2 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1344:37: KW_SCHEMA
                            {
                            KW_SCHEMA433=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_showStatement6965);  
                            stream_KW_SCHEMA.add(KW_SCHEMA433);


                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1344:48: (dbName= Identifier )
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1344:49: dbName= Identifier
                    {
                    dbName=(Token)match(input,Identifier,FOLLOW_Identifier_in_showStatement6971);  
                    stream_Identifier.add(dbName);


                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1344:68: (isExtended= KW_EXTENDED )?
                    int alt127=2;
                    switch ( input.LA(1) ) {
                        case KW_EXTENDED:
                            {
                            alt127=1;
                            }
                            break;
                    }

                    switch (alt127) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1344:69: isExtended= KW_EXTENDED
                            {
                            isExtended=(Token)match(input,KW_EXTENDED,FOLLOW_KW_EXTENDED_in_showStatement6977);  
                            stream_KW_EXTENDED.add(isExtended);


                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: isExtended, dbName
                    // token labels: isExtended, dbName
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleTokenStream stream_isExtended=new RewriteRuleTokenStream(adaptor,"token isExtended",isExtended);
                    RewriteRuleTokenStream stream_dbName=new RewriteRuleTokenStream(adaptor,"token dbName",dbName);
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1344:94: -> ^( TOK_SHOWDBLOCKS $dbName ( $isExtended)? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1344:97: ^( TOK_SHOWDBLOCKS $dbName ( $isExtended)? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_SHOWDBLOCKS, "TOK_SHOWDBLOCKS")
                        , root_1);

                        adaptor.addChild(root_1, stream_dbName.nextNode());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1344:124: ( $isExtended)?
                        if ( stream_isExtended.hasNext() ) {
                            adaptor.addChild(root_1, stream_isExtended.nextNode());

                        }
                        stream_isExtended.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 11 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1345:7: KW_SHOW (showOptions= KW_FORMATTED )? ( KW_INDEX | KW_INDEXES ) KW_ON showStmtIdentifier ( ( KW_FROM | KW_IN ) db_name= identifier )?
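                    // Example (illustrative only; identifiers are hypothetical):
                    //   SHOW FORMATTED INDEXES ON page_view IN db1
                    // matches this alternative; FORMATTED and the FROM/IN database become optional
                    // children of the TOK_SHOWINDEXES node built below.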
                    {
                    KW_SHOW434=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement7000);  
                    stream_KW_SHOW.add(KW_SHOW434);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1345:15: (showOptions= KW_FORMATTED )?
                    int alt128=2;
                    switch ( input.LA(1) ) {
                        case KW_FORMATTED:
                            {
                            alt128=1;
                            }
                            break;
                    }

                    switch (alt128) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1345:16: showOptions= KW_FORMATTED
                            {
                            showOptions=(Token)match(input,KW_FORMATTED,FOLLOW_KW_FORMATTED_in_showStatement7005);  
                            stream_KW_FORMATTED.add(showOptions);


                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1345:43: ( KW_INDEX | KW_INDEXES )
                    int alt129=2;
                    switch ( input.LA(1) ) {
                    case KW_INDEX:
                        {
                        alt129=1;
                        }
                        break;
                    case KW_INDEXES:
                        {
                        alt129=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 129, 0, input);

                        throw nvae;

                    }

                    switch (alt129) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1345:44: KW_INDEX
                            {
                            KW_INDEX435=(Token)match(input,KW_INDEX,FOLLOW_KW_INDEX_in_showStatement7010);  
                            stream_KW_INDEX.add(KW_INDEX435);


                            }
                            break;
                        case 2 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1345:53: KW_INDEXES
                            {
                            KW_INDEXES436=(Token)match(input,KW_INDEXES,FOLLOW_KW_INDEXES_in_showStatement7012);  
                            stream_KW_INDEXES.add(KW_INDEXES436);


                            }
                            break;

                    }


                    KW_ON437=(Token)match(input,KW_ON,FOLLOW_KW_ON_in_showStatement7015);  
                    stream_KW_ON.add(KW_ON437);


                    pushFollow(FOLLOW_showStmtIdentifier_in_showStatement7017);
                    showStmtIdentifier438=showStmtIdentifier();

                    state._fsp--;

                    stream_showStmtIdentifier.add(showStmtIdentifier438.getTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1345:90: ( ( KW_FROM | KW_IN ) db_name= identifier )?
                    int alt131=2;
                    switch ( input.LA(1) ) {
                        case KW_FROM:
                        case KW_IN:
                            {
                            alt131=1;
                            }
                            break;
                    }

                    switch (alt131) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1345:91: ( KW_FROM | KW_IN ) db_name= identifier
                            {
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1345:91: ( KW_FROM | KW_IN )
                            int alt130=2;
                            switch ( input.LA(1) ) {
                            case KW_FROM:
                                {
                                alt130=1;
                                }
                                break;
                            case KW_IN:
                                {
                                alt130=2;
                                }
                                break;
                            default:
                                NoViableAltException nvae =
                                    new NoViableAltException("", 130, 0, input);

                                throw nvae;

                            }

                            switch (alt130) {
                                case 1 :
                                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1345:92: KW_FROM
                                    {
                                    KW_FROM439=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_showStatement7021);  
                                    stream_KW_FROM.add(KW_FROM439);


                                    }
                                    break;
                                case 2 :
                                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1345:100: KW_IN
                                    {
                                    KW_IN440=(Token)match(input,KW_IN,FOLLOW_KW_IN_in_showStatement7023);  
                                    stream_KW_IN.add(KW_IN440);


                                    }
                                    break;

                            }


                            pushFollow(FOLLOW_identifier_in_showStatement7028);
                            db_name=identifier();

                            state._fsp--;

                            stream_identifier.add(db_name.getTree());

                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: db_name, showOptions, showStmtIdentifier
                    // token labels: showOptions
                    // rule labels: retval, db_name
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleTokenStream stream_showOptions=new RewriteRuleTokenStream(adaptor,"token showOptions",showOptions);
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    RewriteRuleSubtreeStream stream_db_name=new RewriteRuleSubtreeStream(adaptor,"rule db_name",db_name!=null?db_name.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1346:5: -> ^( TOK_SHOWINDEXES showStmtIdentifier ( $showOptions)? ( $db_name)? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1346:8: ^( TOK_SHOWINDEXES showStmtIdentifier ( $showOptions)? ( $db_name)? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_SHOWINDEXES, "TOK_SHOWINDEXES")
                        , root_1);

                        adaptor.addChild(root_1, stream_showStmtIdentifier.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1346:46: ( $showOptions)?
                        if ( stream_showOptions.hasNext() ) {
                            adaptor.addChild(root_1, stream_showOptions.nextNode());

                        }
                        stream_showOptions.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1346:60: ( $db_name)?
                        if ( stream_db_name.hasNext() ) {
                            adaptor.addChild(root_1, stream_db_name.nextTree());

                        }
                        stream_db_name.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 12 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1347:7: KW_SHOW KW_COMPACTIONS
                    {
                    KW_SHOW441=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement7058);  
                    stream_KW_SHOW.add(KW_SHOW441);


                    KW_COMPACTIONS442=(Token)match(input,KW_COMPACTIONS,FOLLOW_KW_COMPACTIONS_in_showStatement7060);  
                    stream_KW_COMPACTIONS.add(KW_COMPACTIONS442);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1347:30: -> ^( TOK_SHOW_COMPACTIONS )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1347:33: ^( TOK_SHOW_COMPACTIONS )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_SHOW_COMPACTIONS, "TOK_SHOW_COMPACTIONS")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 13 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1348:7: KW_SHOW KW_TRANSACTIONS
                    {
                    KW_SHOW443=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement7074);  
                    stream_KW_SHOW.add(KW_SHOW443);


                    KW_TRANSACTIONS444=(Token)match(input,KW_TRANSACTIONS,FOLLOW_KW_TRANSACTIONS_in_showStatement7076);  
                    stream_KW_TRANSACTIONS.add(KW_TRANSACTIONS444);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1348:31: -> ^( TOK_SHOW_TRANSACTIONS )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1348:34: ^( TOK_SHOW_TRANSACTIONS )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_SHOW_TRANSACTIONS, "TOK_SHOW_TRANSACTIONS")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 14 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1349:7: KW_SHOW KW_CONF StringLiteral
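                    // Example (illustrative only; the configuration name is just a sample):
                    //   SHOW CONF 'hive.exec.parallel'
                    // matches this alternative and is rewritten below to ^(TOK_SHOWCONF 'hive.exec.parallel').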
                    {
                    KW_SHOW445=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showStatement7090);  
                    stream_KW_SHOW.add(KW_SHOW445);


                    KW_CONF446=(Token)match(input,KW_CONF,FOLLOW_KW_CONF_in_showStatement7092);  
                    stream_KW_CONF.add(KW_CONF446);


                    StringLiteral447=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_showStatement7094);  
                    stream_StringLiteral.add(StringLiteral447);


                    // AST REWRITE
                    // elements: StringLiteral
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1349:37: -> ^( TOK_SHOWCONF StringLiteral )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1349:40: ^( TOK_SHOWCONF StringLiteral )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_SHOWCONF, "TOK_SHOWCONF")
                        , root_1);

                        adaptor.addChild(root_1, 
                        stream_StringLiteral.nextNode()
                        );

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "showStatement"


    public static class lockStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "lockStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1352:1: lockStatement : KW_LOCK KW_TABLE tableName ( partitionSpec )? lockMode -> ^( TOK_LOCKTABLE tableName lockMode ( partitionSpec )? ) ;
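    // Example (illustrative only; identifiers are hypothetical):
    //   LOCK TABLE page_view PARTITION (ds='2015-05-14') SHARED
    // is rewritten to ^(TOK_LOCKTABLE page_view SHARED partitionSpec), with the
    // partitionSpec child present only when a PARTITION clause was given.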
    public final HiveParser.lockStatement_return lockStatement() throws RecognitionException {
        HiveParser.lockStatement_return retval = new HiveParser.lockStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_LOCK448=null;
        Token KW_TABLE449=null;
        HiveParser_FromClauseParser.tableName_return tableName450 =null;

        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec451 =null;

        HiveParser.lockMode_return lockMode452 =null;


        CommonTree KW_LOCK448_tree=null;
        CommonTree KW_TABLE449_tree=null;
        RewriteRuleTokenStream stream_KW_LOCK=new RewriteRuleTokenStream(adaptor,"token KW_LOCK");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
        RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
        RewriteRuleSubtreeStream stream_lockMode=new RewriteRuleSubtreeStream(adaptor,"rule lockMode");
         pushMsg("lock statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1355:5: ( KW_LOCK KW_TABLE tableName ( partitionSpec )? lockMode -> ^( TOK_LOCKTABLE tableName lockMode ( partitionSpec )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1355:7: KW_LOCK KW_TABLE tableName ( partitionSpec )? lockMode
            {
            KW_LOCK448=(Token)match(input,KW_LOCK,FOLLOW_KW_LOCK_in_lockStatement7129);  
            stream_KW_LOCK.add(KW_LOCK448);


            KW_TABLE449=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_lockStatement7131);  
            stream_KW_TABLE.add(KW_TABLE449);


            pushFollow(FOLLOW_tableName_in_lockStatement7133);
            tableName450=tableName();

            state._fsp--;

            stream_tableName.add(tableName450.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1355:34: ( partitionSpec )?
            int alt133=2;
            switch ( input.LA(1) ) {
                case KW_PARTITION:
                    {
                    alt133=1;
                    }
                    break;
            }

            switch (alt133) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1355:34: partitionSpec
                    {
                    pushFollow(FOLLOW_partitionSpec_in_lockStatement7135);
                    partitionSpec451=partitionSpec();

                    state._fsp--;

                    stream_partitionSpec.add(partitionSpec451.getTree());

                    }
                    break;

            }


            pushFollow(FOLLOW_lockMode_in_lockStatement7138);
            lockMode452=lockMode();

            state._fsp--;

            stream_lockMode.add(lockMode452.getTree());

            // AST REWRITE
            // elements: lockMode, tableName, partitionSpec
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1355:58: -> ^( TOK_LOCKTABLE tableName lockMode ( partitionSpec )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1355:61: ^( TOK_LOCKTABLE tableName lockMode ( partitionSpec )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_LOCKTABLE, "TOK_LOCKTABLE")
                , root_1);

                adaptor.addChild(root_1, stream_tableName.nextTree());

                adaptor.addChild(root_1, stream_lockMode.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1355:96: ( partitionSpec )?
                if ( stream_partitionSpec.hasNext() ) {
                    adaptor.addChild(root_1, stream_partitionSpec.nextTree());

                }
                stream_partitionSpec.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "lockStatement"


    public static class lockDatabase_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "lockDatabase"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1358:1: lockDatabase : KW_LOCK ( KW_DATABASE | KW_SCHEMA ) (dbName= Identifier ) lockMode -> ^( TOK_LOCKDB $dbName lockMode ) ;
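    // Example (illustrative only; the database name is hypothetical):
    //   LOCK DATABASE db1 EXCLUSIVE
    // is rewritten to ^(TOK_LOCKDB db1 EXCLUSIVE); LOCK SCHEMA is accepted as a synonym.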
    public final HiveParser.lockDatabase_return lockDatabase() throws RecognitionException {
        HiveParser.lockDatabase_return retval = new HiveParser.lockDatabase_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token dbName=null;
        Token KW_LOCK453=null;
        Token KW_DATABASE454=null;
        Token KW_SCHEMA455=null;
        HiveParser.lockMode_return lockMode456 =null;


        CommonTree dbName_tree=null;
        CommonTree KW_LOCK453_tree=null;
        CommonTree KW_DATABASE454_tree=null;
        CommonTree KW_SCHEMA455_tree=null;
        RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
        RewriteRuleTokenStream stream_KW_LOCK=new RewriteRuleTokenStream(adaptor,"token KW_LOCK");
        RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
        RewriteRuleTokenStream stream_Identifier=new RewriteRuleTokenStream(adaptor,"token Identifier");
        RewriteRuleSubtreeStream stream_lockMode=new RewriteRuleSubtreeStream(adaptor,"rule lockMode");
         pushMsg("lock database statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1361:5: ( KW_LOCK ( KW_DATABASE | KW_SCHEMA ) (dbName= Identifier ) lockMode -> ^( TOK_LOCKDB $dbName lockMode ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1361:7: KW_LOCK ( KW_DATABASE | KW_SCHEMA ) (dbName= Identifier ) lockMode
            {
            KW_LOCK453=(Token)match(input,KW_LOCK,FOLLOW_KW_LOCK_in_lockDatabase7178);  
            stream_KW_LOCK.add(KW_LOCK453);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1361:15: ( KW_DATABASE | KW_SCHEMA )
            int alt134=2;
            switch ( input.LA(1) ) {
            case KW_DATABASE:
                {
                alt134=1;
                }
                break;
            case KW_SCHEMA:
                {
                alt134=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 134, 0, input);

                throw nvae;

            }

            switch (alt134) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1361:16: KW_DATABASE
                    {
                    KW_DATABASE454=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_lockDatabase7181);  
                    stream_KW_DATABASE.add(KW_DATABASE454);


                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1361:28: KW_SCHEMA
                    {
                    KW_SCHEMA455=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_lockDatabase7183);  
                    stream_KW_SCHEMA.add(KW_SCHEMA455);


                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1361:39: (dbName= Identifier )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1361:40: dbName= Identifier
            {
            dbName=(Token)match(input,Identifier,FOLLOW_Identifier_in_lockDatabase7189);  
            stream_Identifier.add(dbName);


            }


            pushFollow(FOLLOW_lockMode_in_lockDatabase7192);
            lockMode456=lockMode();

            state._fsp--;

            stream_lockMode.add(lockMode456.getTree());

            // AST REWRITE
            // elements: lockMode, dbName
            // token labels: dbName
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_dbName=new RewriteRuleTokenStream(adaptor,"token dbName",dbName);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1361:68: -> ^( TOK_LOCKDB $dbName lockMode )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1361:71: ^( TOK_LOCKDB $dbName lockMode )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_LOCKDB, "TOK_LOCKDB")
                , root_1);

                adaptor.addChild(root_1, stream_dbName.nextNode());

                adaptor.addChild(root_1, stream_lockMode.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "lockDatabase"


    public static class lockMode_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "lockMode"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1364:1: lockMode : ( KW_SHARED | KW_EXCLUSIVE );
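    // Accepts a single lock-mode keyword, SHARED or EXCLUSIVE, e.g. the
    // trailing keyword in "LOCK TABLE t SHARED" (illustrative example).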
    public final HiveParser.lockMode_return lockMode() throws RecognitionException {
        HiveParser.lockMode_return retval = new HiveParser.lockMode_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token set457=null;

        CommonTree set457_tree=null;

         pushMsg("lock mode", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1367:5: ( KW_SHARED | KW_EXCLUSIVE )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:
            {
            root_0 = (CommonTree)adaptor.nil();


            set457=(Token)input.LT(1);

            if ( input.LA(1)==KW_EXCLUSIVE||input.LA(1)==KW_SHARED ) {
                input.consume();
                adaptor.addChild(root_0, 
                (CommonTree)adaptor.create(set457)
                );
                state.errorRecovery=false;
            }
            else {
                MismatchedSetException mse = new MismatchedSetException(null,input);
                throw mse;
            }


            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "lockMode"


    public static class unlockStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "unlockStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1370:1: unlockStatement : KW_UNLOCK KW_TABLE tableName ( partitionSpec )? -> ^( TOK_UNLOCKTABLE tableName ( partitionSpec )? ) ;
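    // Illustrative inputs (not ANTLR-generated): "UNLOCK TABLE t" or
    // "UNLOCK TABLE t PARTITION (ds='2015-05-14')", rewritten to a
    // TOK_UNLOCKTABLE subtree.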
    public final HiveParser.unlockStatement_return unlockStatement() throws RecognitionException {
        HiveParser.unlockStatement_return retval = new HiveParser.unlockStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_UNLOCK458=null;
        Token KW_TABLE459=null;
        HiveParser_FromClauseParser.tableName_return tableName460 =null;

        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec461 =null;


        CommonTree KW_UNLOCK458_tree=null;
        CommonTree KW_TABLE459_tree=null;
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleTokenStream stream_KW_UNLOCK=new RewriteRuleTokenStream(adaptor,"token KW_UNLOCK");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
        RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
         pushMsg("unlock statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1373:5: ( KW_UNLOCK KW_TABLE tableName ( partitionSpec )? -> ^( TOK_UNLOCKTABLE tableName ( partitionSpec )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1373:7: KW_UNLOCK KW_TABLE tableName ( partitionSpec )?
            {
            KW_UNLOCK458=(Token)match(input,KW_UNLOCK,FOLLOW_KW_UNLOCK_in_unlockStatement7261);  
            stream_KW_UNLOCK.add(KW_UNLOCK458);


            KW_TABLE459=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_unlockStatement7263);  
            stream_KW_TABLE.add(KW_TABLE459);


            pushFollow(FOLLOW_tableName_in_unlockStatement7265);
            tableName460=tableName();

            state._fsp--;

            stream_tableName.add(tableName460.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1373:36: ( partitionSpec )?
            int alt135=2;
            switch ( input.LA(1) ) {
                case KW_PARTITION:
                    {
                    alt135=1;
                    }
                    break;
            }

            switch (alt135) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1373:36: partitionSpec
                    {
                    pushFollow(FOLLOW_partitionSpec_in_unlockStatement7267);
                    partitionSpec461=partitionSpec();

                    state._fsp--;

                    stream_partitionSpec.add(partitionSpec461.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: partitionSpec, tableName
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1373:52: -> ^( TOK_UNLOCKTABLE tableName ( partitionSpec )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1373:55: ^( TOK_UNLOCKTABLE tableName ( partitionSpec )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_UNLOCKTABLE, "TOK_UNLOCKTABLE")
                , root_1);

                adaptor.addChild(root_1, stream_tableName.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1373:83: ( partitionSpec )?
                if ( stream_partitionSpec.hasNext() ) {
                    adaptor.addChild(root_1, stream_partitionSpec.nextTree());

                }
                stream_partitionSpec.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "unlockStatement"
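    /*
     * Minimal usage sketch (assumption, not part of the generated parser):
     * these rule methods are normally reached through the top-level statement
     * rule via org.apache.hadoop.hive.ql.parse.ParseDriver rather than being
     * called directly. Assuming the standard ParseDriver#parse(String) API:
     *
     *   ParseDriver pd = new ParseDriver();
     *   ASTNode ast = pd.parse("UNLOCK TABLE t PARTITION (ds='2015-05-14')");
     *   // the resulting tree contains a TOK_UNLOCKTABLE node with the
     *   // tableName and partitionSpec subtrees produced by unlockStatement()
     */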


    public static class unlockDatabase_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "unlockDatabase"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1376:1: unlockDatabase : KW_UNLOCK ( KW_DATABASE | KW_SCHEMA ) (dbName= Identifier ) -> ^( TOK_UNLOCKDB $dbName) ;
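    // Illustrative inputs (not ANTLR-generated): "UNLOCK DATABASE db1" or
    // "UNLOCK SCHEMA db1", rewritten to ^(TOK_UNLOCKDB db1).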
    public final HiveParser.unlockDatabase_return unlockDatabase() throws RecognitionException {
        HiveParser.unlockDatabase_return retval = new HiveParser.unlockDatabase_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token dbName=null;
        Token KW_UNLOCK462=null;
        Token KW_DATABASE463=null;
        Token KW_SCHEMA464=null;

        CommonTree dbName_tree=null;
        CommonTree KW_UNLOCK462_tree=null;
        CommonTree KW_DATABASE463_tree=null;
        CommonTree KW_SCHEMA464_tree=null;
        RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
        RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
        RewriteRuleTokenStream stream_KW_UNLOCK=new RewriteRuleTokenStream(adaptor,"token KW_UNLOCK");
        RewriteRuleTokenStream stream_Identifier=new RewriteRuleTokenStream(adaptor,"token Identifier");

         pushMsg("unlock database statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1379:5: ( KW_UNLOCK ( KW_DATABASE | KW_SCHEMA ) (dbName= Identifier ) -> ^( TOK_UNLOCKDB $dbName) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1379:7: KW_UNLOCK ( KW_DATABASE | KW_SCHEMA ) (dbName= Identifier )
            {
            KW_UNLOCK462=(Token)match(input,KW_UNLOCK,FOLLOW_KW_UNLOCK_in_unlockDatabase7307);  
            stream_KW_UNLOCK.add(KW_UNLOCK462);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1379:17: ( KW_DATABASE | KW_SCHEMA )
            int alt136=2;
            switch ( input.LA(1) ) {
            case KW_DATABASE:
                {
                alt136=1;
                }
                break;
            case KW_SCHEMA:
                {
                alt136=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 136, 0, input);

                throw nvae;

            }

            switch (alt136) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1379:18: KW_DATABASE
                    {
                    KW_DATABASE463=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_unlockDatabase7310);  
                    stream_KW_DATABASE.add(KW_DATABASE463);


                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1379:30: KW_SCHEMA
                    {
                    KW_SCHEMA464=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_unlockDatabase7312);  
                    stream_KW_SCHEMA.add(KW_SCHEMA464);


                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1379:41: (dbName= Identifier )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1379:42: dbName= Identifier
            {
            dbName=(Token)match(input,Identifier,FOLLOW_Identifier_in_unlockDatabase7318);  
            stream_Identifier.add(dbName);


            }


            // AST REWRITE
            // elements: dbName
            // token labels: dbName
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_dbName=new RewriteRuleTokenStream(adaptor,"token dbName",dbName);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1379:61: -> ^( TOK_UNLOCKDB $dbName)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1379:64: ^( TOK_UNLOCKDB $dbName)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_UNLOCKDB, "TOK_UNLOCKDB")
                , root_1);

                adaptor.addChild(root_1, stream_dbName.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "unlockDatabase"


    public static class createRoleStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "createRoleStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1382:1: createRoleStatement : KW_CREATE KW_ROLE roleName= identifier -> ^( TOK_CREATEROLE $roleName) ;
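    // Illustrative input (not ANTLR-generated): "CREATE ROLE analyst",
    // rewritten to ^(TOK_CREATEROLE analyst).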
    public final HiveParser.createRoleStatement_return createRoleStatement() throws RecognitionException {
        HiveParser.createRoleStatement_return retval = new HiveParser.createRoleStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_CREATE465=null;
        Token KW_ROLE466=null;
        HiveParser_IdentifiersParser.identifier_return roleName =null;


        CommonTree KW_CREATE465_tree=null;
        CommonTree KW_ROLE466_tree=null;
        RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
        RewriteRuleTokenStream stream_KW_ROLE=new RewriteRuleTokenStream(adaptor,"token KW_ROLE");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("create role", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1385:5: ( KW_CREATE KW_ROLE roleName= identifier -> ^( TOK_CREATEROLE $roleName) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1385:7: KW_CREATE KW_ROLE roleName= identifier
            {
            KW_CREATE465=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_createRoleStatement7355);  
            stream_KW_CREATE.add(KW_CREATE465);


            KW_ROLE466=(Token)match(input,KW_ROLE,FOLLOW_KW_ROLE_in_createRoleStatement7357);  
            stream_KW_ROLE.add(KW_ROLE466);


            pushFollow(FOLLOW_identifier_in_createRoleStatement7361);
            roleName=identifier();

            state._fsp--;

            stream_identifier.add(roleName.getTree());

            // AST REWRITE
            // elements: roleName
            // token labels: 
            // rule labels: retval, roleName
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_roleName=new RewriteRuleSubtreeStream(adaptor,"rule roleName",roleName!=null?roleName.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1386:5: -> ^( TOK_CREATEROLE $roleName)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1386:8: ^( TOK_CREATEROLE $roleName)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_CREATEROLE, "TOK_CREATEROLE")
                , root_1);

                adaptor.addChild(root_1, stream_roleName.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "createRoleStatement"


    public static class dropRoleStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "dropRoleStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1389:1: dropRoleStatement : KW_DROP KW_ROLE roleName= identifier -> ^( TOK_DROPROLE $roleName) ;
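    // Illustrative input (not ANTLR-generated): "DROP ROLE analyst",
    // rewritten to ^(TOK_DROPROLE analyst).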
    public final HiveParser.dropRoleStatement_return dropRoleStatement() throws RecognitionException {
        HiveParser.dropRoleStatement_return retval = new HiveParser.dropRoleStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_DROP467=null;
        Token KW_ROLE468=null;
        HiveParser_IdentifiersParser.identifier_return roleName =null;


        CommonTree KW_DROP467_tree=null;
        CommonTree KW_ROLE468_tree=null;
        RewriteRuleTokenStream stream_KW_ROLE=new RewriteRuleTokenStream(adaptor,"token KW_ROLE");
        RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
        pushMsg("drop role", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1392:5: ( KW_DROP KW_ROLE roleName= identifier -> ^( TOK_DROPROLE $roleName) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1392:7: KW_DROP KW_ROLE roleName= identifier
            {
            KW_DROP467=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_dropRoleStatement7401);  
            stream_KW_DROP.add(KW_DROP467);


            KW_ROLE468=(Token)match(input,KW_ROLE,FOLLOW_KW_ROLE_in_dropRoleStatement7403);  
            stream_KW_ROLE.add(KW_ROLE468);


            pushFollow(FOLLOW_identifier_in_dropRoleStatement7407);
            roleName=identifier();

            state._fsp--;

            stream_identifier.add(roleName.getTree());

            // AST REWRITE
            // elements: roleName
            // token labels: 
            // rule labels: retval, roleName
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_roleName=new RewriteRuleSubtreeStream(adaptor,"rule roleName",roleName!=null?roleName.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1393:5: -> ^( TOK_DROPROLE $roleName)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1393:8: ^( TOK_DROPROLE $roleName)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DROPROLE, "TOK_DROPROLE")
                , root_1);

                adaptor.addChild(root_1, stream_roleName.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "dropRoleStatement"


    public static class grantPrivileges_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "grantPrivileges"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1396:1: grantPrivileges : KW_GRANT privList= privilegeList ( privilegeObject )? KW_TO principalSpecification ( withGrantOption )? -> ^( TOK_GRANT $privList principalSpecification ( privilegeObject )? ( withGrantOption )? ) ;
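    // Illustrative input (not ANTLR-generated): e.g.
    // "GRANT SELECT ON TABLE t TO USER hive_user WITH GRANT OPTION";
    // the exact privilege, object and principal forms are defined by the
    // privilegeList, privilegeObject and principalSpecification sub-rules.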
    public final HiveParser.grantPrivileges_return grantPrivileges() throws RecognitionException {
        HiveParser.grantPrivileges_return retval = new HiveParser.grantPrivileges_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_GRANT469=null;
        Token KW_TO471=null;
        HiveParser.privilegeList_return privList =null;

        HiveParser.privilegeObject_return privilegeObject470 =null;

        HiveParser.principalSpecification_return principalSpecification472 =null;

        HiveParser.withGrantOption_return withGrantOption473 =null;


        CommonTree KW_GRANT469_tree=null;
        CommonTree KW_TO471_tree=null;
        RewriteRuleTokenStream stream_KW_GRANT=new RewriteRuleTokenStream(adaptor,"token KW_GRANT");
        RewriteRuleTokenStream stream_KW_TO=new RewriteRuleTokenStream(adaptor,"token KW_TO");
        RewriteRuleSubtreeStream stream_withGrantOption=new RewriteRuleSubtreeStream(adaptor,"rule withGrantOption");
        RewriteRuleSubtreeStream stream_privilegeList=new RewriteRuleSubtreeStream(adaptor,"rule privilegeList");
        RewriteRuleSubtreeStream stream_privilegeObject=new RewriteRuleSubtreeStream(adaptor,"rule privilegeObject");
        RewriteRuleSubtreeStream stream_principalSpecification=new RewriteRuleSubtreeStream(adaptor,"rule principalSpecification");
        pushMsg("grant privileges", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1399:5: ( KW_GRANT privList= privilegeList ( privilegeObject )? KW_TO principalSpecification ( withGrantOption )? -> ^( TOK_GRANT $privList principalSpecification ( privilegeObject )? ( withGrantOption )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1399:7: KW_GRANT privList= privilegeList ( privilegeObject )? KW_TO principalSpecification ( withGrantOption )?
            {
            KW_GRANT469=(Token)match(input,KW_GRANT,FOLLOW_KW_GRANT_in_grantPrivileges7447);  
            stream_KW_GRANT.add(KW_GRANT469);


            pushFollow(FOLLOW_privilegeList_in_grantPrivileges7451);
            privList=privilegeList();

            state._fsp--;

            stream_privilegeList.add(privList.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1400:7: ( privilegeObject )?
            int alt137=2;
            switch ( input.LA(1) ) {
                case KW_ON:
                    {
                    alt137=1;
                    }
                    break;
            }

            switch (alt137) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1400:7: privilegeObject
                    {
                    pushFollow(FOLLOW_privilegeObject_in_grantPrivileges7459);
                    privilegeObject470=privilegeObject();

                    state._fsp--;

                    stream_privilegeObject.add(privilegeObject470.getTree());

                    }
                    break;

            }


            KW_TO471=(Token)match(input,KW_TO,FOLLOW_KW_TO_in_grantPrivileges7468);  
            stream_KW_TO.add(KW_TO471);


            pushFollow(FOLLOW_principalSpecification_in_grantPrivileges7470);
            principalSpecification472=principalSpecification();

            state._fsp--;

            stream_principalSpecification.add(principalSpecification472.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1402:7: ( withGrantOption )?
            int alt138=2;
            switch ( input.LA(1) ) {
                case KW_WITH:
                    {
                    alt138=1;
                    }
                    break;
            }

            switch (alt138) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1402:7: withGrantOption
                    {
                    pushFollow(FOLLOW_withGrantOption_in_grantPrivileges7478);
                    withGrantOption473=withGrantOption();

                    state._fsp--;

                    stream_withGrantOption.add(withGrantOption473.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: withGrantOption, privilegeObject, principalSpecification, privList
            // token labels: 
            // rule labels: retval, privList
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_privList=new RewriteRuleSubtreeStream(adaptor,"rule privList",privList!=null?privList.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1403:5: -> ^( TOK_GRANT $privList principalSpecification ( privilegeObject )? ( withGrantOption )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1403:8: ^( TOK_GRANT $privList principalSpecification ( privilegeObject )? ( withGrantOption )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_GRANT, "TOK_GRANT")
                , root_1);

                adaptor.addChild(root_1, stream_privList.nextTree());

                adaptor.addChild(root_1, stream_principalSpecification.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1403:53: ( privilegeObject )?
                if ( stream_privilegeObject.hasNext() ) {
                    adaptor.addChild(root_1, stream_privilegeObject.nextTree());

                }
                stream_privilegeObject.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1403:70: ( withGrantOption )?
                if ( stream_withGrantOption.hasNext() ) {
                    adaptor.addChild(root_1, stream_withGrantOption.nextTree());

                }
                stream_withGrantOption.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "grantPrivileges"


    public static class revokePrivileges_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "revokePrivileges"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1406:1: revokePrivileges : KW_REVOKE ( grantOptionFor )? privilegeList ( privilegeObject )? KW_FROM principalSpecification -> ^( TOK_REVOKE privilegeList principalSpecification ( privilegeObject )? ( grantOptionFor )? ) ;
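    // Illustrative inputs (not ANTLR-generated): e.g.
    // "REVOKE SELECT ON TABLE t FROM USER hive_user" or
    // "REVOKE GRANT OPTION FOR SELECT ON TABLE t FROM USER hive_user".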
    public final HiveParser.revokePrivileges_return revokePrivileges() throws RecognitionException {
        HiveParser.revokePrivileges_return retval = new HiveParser.revokePrivileges_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_REVOKE474=null;
        Token KW_FROM478=null;
        HiveParser.grantOptionFor_return grantOptionFor475 =null;

        HiveParser.privilegeList_return privilegeList476 =null;

        HiveParser.privilegeObject_return privilegeObject477 =null;

        HiveParser.principalSpecification_return principalSpecification479 =null;


        CommonTree KW_REVOKE474_tree=null;
        CommonTree KW_FROM478_tree=null;
        RewriteRuleTokenStream stream_KW_REVOKE=new RewriteRuleTokenStream(adaptor,"token KW_REVOKE");
        RewriteRuleTokenStream stream_KW_FROM=new RewriteRuleTokenStream(adaptor,"token KW_FROM");
        RewriteRuleSubtreeStream stream_grantOptionFor=new RewriteRuleSubtreeStream(adaptor,"rule grantOptionFor");
        RewriteRuleSubtreeStream stream_privilegeList=new RewriteRuleSubtreeStream(adaptor,"rule privilegeList");
        RewriteRuleSubtreeStream stream_privilegeObject=new RewriteRuleSubtreeStream(adaptor,"rule privilegeObject");
        RewriteRuleSubtreeStream stream_principalSpecification=new RewriteRuleSubtreeStream(adaptor,"rule principalSpecification");
        pushMsg("revoke privileges", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1409:5: ( KW_REVOKE ( grantOptionFor )? privilegeList ( privilegeObject )? KW_FROM principalSpecification -> ^( TOK_REVOKE privilegeList principalSpecification ( privilegeObject )? ( grantOptionFor )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1409:7: KW_REVOKE ( grantOptionFor )? privilegeList ( privilegeObject )? KW_FROM principalSpecification
            {
            KW_REVOKE474=(Token)match(input,KW_REVOKE,FOLLOW_KW_REVOKE_in_revokePrivileges7527);  
            stream_KW_REVOKE.add(KW_REVOKE474);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1409:17: ( grantOptionFor )?
            int alt139=2;
            switch ( input.LA(1) ) {
                case KW_GRANT:
                    {
                    alt139=1;
                    }
                    break;
            }

            switch (alt139) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1409:17: grantOptionFor
                    {
                    pushFollow(FOLLOW_grantOptionFor_in_revokePrivileges7529);
                    grantOptionFor475=grantOptionFor();

                    state._fsp--;

                    stream_grantOptionFor.add(grantOptionFor475.getTree());

                    }
                    break;

            }


            pushFollow(FOLLOW_privilegeList_in_revokePrivileges7532);
            privilegeList476=privilegeList();

            state._fsp--;

            stream_privilegeList.add(privilegeList476.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1409:47: ( privilegeObject )?
            int alt140=2;
            switch ( input.LA(1) ) {
                case KW_ON:
                    {
                    alt140=1;
                    }
                    break;
            }

            switch (alt140) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1409:47: privilegeObject
                    {
                    pushFollow(FOLLOW_privilegeObject_in_revokePrivileges7534);
                    privilegeObject477=privilegeObject();

                    state._fsp--;

                    stream_privilegeObject.add(privilegeObject477.getTree());

                    }
                    break;

            }


            KW_FROM478=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_revokePrivileges7537);  
            stream_KW_FROM.add(KW_FROM478);


            pushFollow(FOLLOW_principalSpecification_in_revokePrivileges7539);
            principalSpecification479=principalSpecification();

            state._fsp--;

            stream_principalSpecification.add(principalSpecification479.getTree());

            // AST REWRITE
            // elements: privilegeObject, privilegeList, principalSpecification, grantOptionFor
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1410:5: -> ^( TOK_REVOKE privilegeList principalSpecification ( privilegeObject )? ( grantOptionFor )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1410:8: ^( TOK_REVOKE privilegeList principalSpecification ( privilegeObject )? ( grantOptionFor )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_REVOKE, "TOK_REVOKE")
                , root_1);

                adaptor.addChild(root_1, stream_privilegeList.nextTree());

                adaptor.addChild(root_1, stream_principalSpecification.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1410:58: ( privilegeObject )?
                if ( stream_privilegeObject.hasNext() ) {
                    adaptor.addChild(root_1, stream_privilegeObject.nextTree());

                }
                stream_privilegeObject.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1410:75: ( grantOptionFor )?
                if ( stream_grantOptionFor.hasNext() ) {
                    adaptor.addChild(root_1, stream_grantOptionFor.nextTree());

                }
                stream_grantOptionFor.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "revokePrivileges"


    public static class grantRole_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "grantRole"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1413:1: grantRole : KW_GRANT ( KW_ROLE )? identifier ( COMMA identifier )* KW_TO principalSpecification ( withAdminOption )? -> ^( TOK_GRANT_ROLE principalSpecification ( withAdminOption )? ( identifier )+ ) ;
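    // Illustrative input (not ANTLR-generated): e.g.
    // "GRANT ROLE r1, r2 TO USER hive_user WITH ADMIN OPTION".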
    public final HiveParser.grantRole_return grantRole() throws RecognitionException {
        HiveParser.grantRole_return retval = new HiveParser.grantRole_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_GRANT480=null;
        Token KW_ROLE481=null;
        Token COMMA483=null;
        Token KW_TO485=null;
        HiveParser_IdentifiersParser.identifier_return identifier482 =null;

        HiveParser_IdentifiersParser.identifier_return identifier484 =null;

        HiveParser.principalSpecification_return principalSpecification486 =null;

        HiveParser.withAdminOption_return withAdminOption487 =null;


        CommonTree KW_GRANT480_tree=null;
        CommonTree KW_ROLE481_tree=null;
        CommonTree COMMA483_tree=null;
        CommonTree KW_TO485_tree=null;
        RewriteRuleTokenStream stream_KW_GRANT=new RewriteRuleTokenStream(adaptor,"token KW_GRANT");
        RewriteRuleTokenStream stream_KW_ROLE=new RewriteRuleTokenStream(adaptor,"token KW_ROLE");
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleTokenStream stream_KW_TO=new RewriteRuleTokenStream(adaptor,"token KW_TO");
        RewriteRuleSubtreeStream stream_withAdminOption=new RewriteRuleSubtreeStream(adaptor,"rule withAdminOption");
        RewriteRuleSubtreeStream stream_principalSpecification=new RewriteRuleSubtreeStream(adaptor,"rule principalSpecification");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
        pushMsg("grant role", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1416:5: ( KW_GRANT ( KW_ROLE )? identifier ( COMMA identifier )* KW_TO principalSpecification ( withAdminOption )? -> ^( TOK_GRANT_ROLE principalSpecification ( withAdminOption )? ( identifier )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1416:7: KW_GRANT ( KW_ROLE )? identifier ( COMMA identifier )* KW_TO principalSpecification ( withAdminOption )?
            {
            KW_GRANT480=(Token)match(input,KW_GRANT,FOLLOW_KW_GRANT_in_grantRole7586);  
            stream_KW_GRANT.add(KW_GRANT480);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1416:16: ( KW_ROLE )?
            int alt141=2;
            switch ( input.LA(1) ) {
                case KW_ROLE:
                    {
                    switch ( input.LA(2) ) {
                        case Identifier:
                        case KW_ADD:
                        case KW_ADMIN:
                        case KW_AFTER:
                        case KW_ALL:
                        case KW_ALTER:
                        case KW_ANALYZE:
                        case KW_ARCHIVE:
                        case KW_ARRAY:
                        case KW_AS:
                        case KW_ASC:
                        case KW_AUTHORIZATION:
                        case KW_BEFORE:
                        case KW_BETWEEN:
                        case KW_BIGINT:
                        case KW_BINARY:
                        case KW_BOOLEAN:
                        case KW_BOTH:
                        case KW_BUCKET:
                        case KW_BUCKETS:
                        case KW_BY:
                        case KW_CASCADE:
                        case KW_CHANGE:
                        case KW_CLUSTER:
                        case KW_CLUSTERED:
                        case KW_CLUSTERSTATUS:
                        case KW_COLLECTION:
                        case KW_COLUMNS:
                        case KW_COMMENT:
                        case KW_COMPACT:
                        case KW_COMPACTIONS:
                        case KW_COMPUTE:
                        case KW_CONCATENATE:
                        case KW_CONTINUE:
                        case KW_CREATE:
                        case KW_CUBE:
                        case KW_CURSOR:
                        case KW_DATA:
                        case KW_DATABASES:
                        case KW_DATE:
                        case KW_DATETIME:
                        case KW_DBPROPERTIES:
                        case KW_DECIMAL:
                        case KW_DEFAULT:
                        case KW_DEFERRED:
                        case KW_DEFINED:
                        case KW_DELETE:
                        case KW_DELIMITED:
                        case KW_DEPENDENCY:
                        case KW_DESC:
                        case KW_DESCRIBE:
                        case KW_DIRECTORIES:
                        case KW_DIRECTORY:
                        case KW_DISABLE:
                        case KW_DISTRIBUTE:
                        case KW_DOUBLE:
                        case KW_DROP:
                        case KW_ELEM_TYPE:
                        case KW_ENABLE:
                        case KW_ESCAPED:
                        case KW_EXCLUSIVE:
                        case KW_EXISTS:
                        case KW_EXPLAIN:
                        case KW_EXPORT:
                        case KW_EXTERNAL:
                        case KW_FALSE:
                        case KW_FETCH:
                        case KW_FIELDS:
                        case KW_FILE:
                        case KW_FILEFORMAT:
                        case KW_FIRST:
                        case KW_FLOAT:
                        case KW_FOR:
                        case KW_FORMAT:
                        case KW_FORMATTED:
                        case KW_FULL:
                        case KW_FUNCTIONS:
                        case KW_GRANT:
                        case KW_GROUP:
                        case KW_GROUPING:
                        case KW_HOLD_DDLTIME:
                        case KW_IDXPROPERTIES:
                        case KW_IGNORE:
                        case KW_IMPORT:
                        case KW_IN:
                        case KW_INDEX:
                        case KW_INDEXES:
                        case KW_INNER:
                        case KW_INPATH:
                        case KW_INPUTDRIVER:
                        case KW_INPUTFORMAT:
                        case KW_INSERT:
                        case KW_INT:
                        case KW_INTERSECT:
                        case KW_INTO:
                        case KW_IS:
                        case KW_ITEMS:
                        case KW_JAR:
                        case KW_KEYS:
                        case KW_KEY_TYPE:
                        case KW_LATERAL:
                        case KW_LEFT:
                        case KW_LIKE:
                        case KW_LIMIT:
                        case KW_LINES:
                        case KW_LOAD:
                        case KW_LOCAL:
                        case KW_LOCATION:
                        case KW_LOCK:
                        case KW_LOCKS:
                        case KW_LOGICAL:
                        case KW_LONG:
                        case KW_MAPJOIN:
                        case KW_MATERIALIZED:
                        case KW_MINUS:
                        case KW_MSCK:
                        case KW_NONE:
                        case KW_NOSCAN:
                        case KW_NO_DROP:
                        case KW_NULL:
                        case KW_OF:
                        case KW_OFFLINE:
                        case KW_OPTION:
                        case KW_ORDER:
                        case KW_OUT:
                        case KW_OUTER:
                        case KW_OUTPUTDRIVER:
                        case KW_OUTPUTFORMAT:
                        case KW_OVERWRITE:
                        case KW_OWNER:
                        case KW_PARTITION:
                        case KW_PARTITIONED:
                        case KW_PARTITIONS:
                        case KW_PERCENT:
                        case KW_PLUS:
                        case KW_PRETTY:
                        case KW_PRINCIPALS:
                        case KW_PROCEDURE:
                        case KW_PROTECTION:
                        case KW_PURGE:
                        case KW_RANGE:
                        case KW_READ:
                        case KW_READONLY:
                        case KW_READS:
                        case KW_REBUILD:
                        case KW_RECORDREADER:
                        case KW_RECORDWRITER:
                        case KW_REGEXP:
                        case KW_RENAME:
                        case KW_REPAIR:
                        case KW_REPLACE:
                        case KW_RESTRICT:
                        case KW_REVOKE:
                        case KW_REWRITE:
                        case KW_RIGHT:
                        case KW_RLIKE:
                        case KW_ROLE:
                        case KW_ROLES:
                        case KW_ROLLUP:
                        case KW_ROW:
                        case KW_ROWS:
                        case KW_SCHEMA:
                        case KW_SCHEMAS:
                        case KW_SEMI:
                        case KW_SERDE:
                        case KW_SERDEPROPERTIES:
                        case KW_SET:
                        case KW_SETS:
                        case KW_SHARED:
                        case KW_SHOW:
                        case KW_SHOW_DATABASE:
                        case KW_SKEWED:
                        case KW_SMALLINT:
                        case KW_SORT:
                        case KW_SORTED:
                        case KW_SSL:
                        case KW_STATISTICS:
                        case KW_STORED:
                        case KW_STREAMTABLE:
                        case KW_STRING:
                        case KW_STRUCT:
                        case KW_TABLE:
                        case KW_TABLES:
                        case KW_TBLPROPERTIES:
                        case KW_TEMPORARY:
                        case KW_TERMINATED:
                        case KW_TIMESTAMP:
                        case KW_TINYINT:
                        case KW_TOUCH:
                        case KW_TRANSACTIONS:
                        case KW_TRIGGER:
                        case KW_TRUE:
                        case KW_TRUNCATE:
                        case KW_UNARCHIVE:
                        case KW_UNDO:
                        case KW_UNION:
                        case KW_UNIONTYPE:
                        case KW_UNLOCK:
                        case KW_UNSET:
                        case KW_UNSIGNED:
                        case KW_UPDATE:
                        case KW_USE:
                        case KW_USER:
                        case KW_USING:
                        case KW_UTC:
                        case KW_UTCTIMESTAMP:
                        case KW_VALUES:
                        case KW_VALUE_TYPE:
                        case KW_VIEW:
                        case KW_WHILE:
                        case KW_WITH:
                            {
                            alt141=1;
                            }
                            break;
                        case KW_TO:
                            {
                            switch ( input.LA(3) ) {
                                case COMMA:
                                case KW_TO:
                                    {
                                    alt141=1;
                                    }
                                    break;
                            }

                            }
                            break;
                    }

                    }
                    break;
            }

            switch (alt141) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1416:16: KW_ROLE
                    {
                    KW_ROLE481=(Token)match(input,KW_ROLE,FOLLOW_KW_ROLE_in_grantRole7588);  
                    stream_KW_ROLE.add(KW_ROLE481);


                    }
                    break;

            }


            pushFollow(FOLLOW_identifier_in_grantRole7591);
            identifier482=identifier();

            state._fsp--;

            stream_identifier.add(identifier482.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1416:36: ( COMMA identifier )*
            loop142:
            do {
                int alt142=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt142=1;
                    }
                    break;

                }

                switch (alt142) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1416:37: COMMA identifier
            	    {
            	    COMMA483=(Token)match(input,COMMA,FOLLOW_COMMA_in_grantRole7594);  
            	    stream_COMMA.add(COMMA483);


            	    pushFollow(FOLLOW_identifier_in_grantRole7596);
            	    identifier484=identifier();

            	    state._fsp--;

            	    stream_identifier.add(identifier484.getTree());

            	    }
            	    break;

            	default :
            	    break loop142;
                }
            } while (true);


            KW_TO485=(Token)match(input,KW_TO,FOLLOW_KW_TO_in_grantRole7600);  
            stream_KW_TO.add(KW_TO485);


            pushFollow(FOLLOW_principalSpecification_in_grantRole7602);
            principalSpecification486=principalSpecification();

            state._fsp--;

            stream_principalSpecification.add(principalSpecification486.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1416:85: ( withAdminOption )?
            int alt143=2;
            switch ( input.LA(1) ) {
                case KW_WITH:
                    {
                    alt143=1;
                    }
                    break;
            }

            switch (alt143) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1416:85: withAdminOption
                    {
                    pushFollow(FOLLOW_withAdminOption_in_grantRole7604);
                    withAdminOption487=withAdminOption();

                    state._fsp--;

                    stream_withAdminOption.add(withAdminOption487.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: principalSpecification, identifier, withAdminOption
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1417:5: -> ^( TOK_GRANT_ROLE principalSpecification ( withAdminOption )? ( identifier )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1417:8: ^( TOK_GRANT_ROLE principalSpecification ( withAdminOption )? ( identifier )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_GRANT_ROLE, "TOK_GRANT_ROLE")
                , root_1);

                adaptor.addChild(root_1, stream_principalSpecification.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1417:48: ( withAdminOption )?
                if ( stream_withAdminOption.hasNext() ) {
                    adaptor.addChild(root_1, stream_withAdminOption.nextTree());

                }
                stream_withAdminOption.reset();

                if ( !(stream_identifier.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_identifier.hasNext() ) {
                    adaptor.addChild(root_1, stream_identifier.nextTree());

                }
                stream_identifier.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "grantRole"


    public static class revokeRole_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "revokeRole"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1420:1: revokeRole : KW_REVOKE ( adminOptionFor )? ( KW_ROLE )? identifier ( COMMA identifier )* KW_FROM principalSpecification -> ^( TOK_REVOKE_ROLE principalSpecification ( adminOptionFor )? ( identifier )+ ) ;
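    // Illustrative inputs (not ANTLR-generated): e.g.
    // "REVOKE ROLE r1 FROM USER hive_user" or
    // "REVOKE ADMIN OPTION FOR ROLE r1 FROM USER hive_user".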
    public final HiveParser.revokeRole_return revokeRole() throws RecognitionException {
        HiveParser.revokeRole_return retval = new HiveParser.revokeRole_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_REVOKE488=null;
        Token KW_ROLE490=null;
        Token COMMA492=null;
        Token KW_FROM494=null;
        HiveParser.adminOptionFor_return adminOptionFor489 =null;

        HiveParser_IdentifiersParser.identifier_return identifier491 =null;

        HiveParser_IdentifiersParser.identifier_return identifier493 =null;

        HiveParser.principalSpecification_return principalSpecification495 =null;


        CommonTree KW_REVOKE488_tree=null;
        CommonTree KW_ROLE490_tree=null;
        CommonTree COMMA492_tree=null;
        CommonTree KW_FROM494_tree=null;
        RewriteRuleTokenStream stream_KW_REVOKE=new RewriteRuleTokenStream(adaptor,"token KW_REVOKE");
        RewriteRuleTokenStream stream_KW_ROLE=new RewriteRuleTokenStream(adaptor,"token KW_ROLE");
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleTokenStream stream_KW_FROM=new RewriteRuleTokenStream(adaptor,"token KW_FROM");
        RewriteRuleSubtreeStream stream_adminOptionFor=new RewriteRuleSubtreeStream(adaptor,"rule adminOptionFor");
        RewriteRuleSubtreeStream stream_principalSpecification=new RewriteRuleSubtreeStream(adaptor,"rule principalSpecification");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
        pushMsg("revoke role", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1423:5: ( KW_REVOKE ( adminOptionFor )? ( KW_ROLE )? identifier ( COMMA identifier )* KW_FROM principalSpecification -> ^( TOK_REVOKE_ROLE principalSpecification ( adminOptionFor )? ( identifier )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1423:7: KW_REVOKE ( adminOptionFor )? ( KW_ROLE )? identifier ( COMMA identifier )* KW_FROM principalSpecification
            {
            KW_REVOKE488=(Token)match(input,KW_REVOKE,FOLLOW_KW_REVOKE_in_revokeRole7650);  
            stream_KW_REVOKE.add(KW_REVOKE488);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1423:17: ( adminOptionFor )?
            int alt144=2;
            switch ( input.LA(1) ) {
                case KW_ADMIN:
                    {
                    switch ( input.LA(2) ) {
                        case KW_OPTION:
                            {
                            alt144=1;
                            }
                            break;
                    }

                    }
                    break;
            }

            switch (alt144) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1423:17: adminOptionFor
                    {
                    pushFollow(FOLLOW_adminOptionFor_in_revokeRole7652);
                    adminOptionFor489=adminOptionFor();

                    state._fsp--;

                    stream_adminOptionFor.add(adminOptionFor489.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1423:33: ( KW_ROLE )?
            int alt145=2;
            switch ( input.LA(1) ) {
                case KW_ROLE:
                    {
                    switch ( input.LA(2) ) {
                        case Identifier:
                        case KW_ADD:
                        case KW_ADMIN:
                        case KW_AFTER:
                        case KW_ALL:
                        case KW_ALTER:
                        case KW_ANALYZE:
                        case KW_ARCHIVE:
                        case KW_ARRAY:
                        case KW_AS:
                        case KW_ASC:
                        case KW_AUTHORIZATION:
                        case KW_BEFORE:
                        case KW_BETWEEN:
                        case KW_BIGINT:
                        case KW_BINARY:
                        case KW_BOOLEAN:
                        case KW_BOTH:
                        case KW_BUCKET:
                        case KW_BUCKETS:
                        case KW_BY:
                        case KW_CASCADE:
                        case KW_CHANGE:
                        case KW_CLUSTER:
                        case KW_CLUSTERED:
                        case KW_CLUSTERSTATUS:
                        case KW_COLLECTION:
                        case KW_COLUMNS:
                        case KW_COMMENT:
                        case KW_COMPACT:
                        case KW_COMPACTIONS:
                        case KW_COMPUTE:
                        case KW_CONCATENATE:
                        case KW_CONTINUE:
                        case KW_CREATE:
                        case KW_CUBE:
                        case KW_CURSOR:
                        case KW_DATA:
                        case KW_DATABASES:
                        case KW_DATE:
                        case KW_DATETIME:
                        case KW_DBPROPERTIES:
                        case KW_DECIMAL:
                        case KW_DEFAULT:
                        case KW_DEFERRED:
                        case KW_DEFINED:
                        case KW_DELETE:
                        case KW_DELIMITED:
                        case KW_DEPENDENCY:
                        case KW_DESC:
                        case KW_DESCRIBE:
                        case KW_DIRECTORIES:
                        case KW_DIRECTORY:
                        case KW_DISABLE:
                        case KW_DISTRIBUTE:
                        case KW_DOUBLE:
                        case KW_DROP:
                        case KW_ELEM_TYPE:
                        case KW_ENABLE:
                        case KW_ESCAPED:
                        case KW_EXCLUSIVE:
                        case KW_EXISTS:
                        case KW_EXPLAIN:
                        case KW_EXPORT:
                        case KW_EXTERNAL:
                        case KW_FALSE:
                        case KW_FETCH:
                        case KW_FIELDS:
                        case KW_FILE:
                        case KW_FILEFORMAT:
                        case KW_FIRST:
                        case KW_FLOAT:
                        case KW_FOR:
                        case KW_FORMAT:
                        case KW_FORMATTED:
                        case KW_FULL:
                        case KW_FUNCTIONS:
                        case KW_GRANT:
                        case KW_GROUP:
                        case KW_GROUPING:
                        case KW_HOLD_DDLTIME:
                        case KW_IDXPROPERTIES:
                        case KW_IGNORE:
                        case KW_IMPORT:
                        case KW_IN:
                        case KW_INDEX:
                        case KW_INDEXES:
                        case KW_INNER:
                        case KW_INPATH:
                        case KW_INPUTDRIVER:
                        case KW_INPUTFORMAT:
                        case KW_INSERT:
                        case KW_INT:
                        case KW_INTERSECT:
                        case KW_INTO:
                        case KW_IS:
                        case KW_ITEMS:
                        case KW_JAR:
                        case KW_KEYS:
                        case KW_KEY_TYPE:
                        case KW_LATERAL:
                        case KW_LEFT:
                        case KW_LIKE:
                        case KW_LIMIT:
                        case KW_LINES:
                        case KW_LOAD:
                        case KW_LOCAL:
                        case KW_LOCATION:
                        case KW_LOCK:
                        case KW_LOCKS:
                        case KW_LOGICAL:
                        case KW_LONG:
                        case KW_MAPJOIN:
                        case KW_MATERIALIZED:
                        case KW_MINUS:
                        case KW_MSCK:
                        case KW_NONE:
                        case KW_NOSCAN:
                        case KW_NO_DROP:
                        case KW_NULL:
                        case KW_OF:
                        case KW_OFFLINE:
                        case KW_OPTION:
                        case KW_ORDER:
                        case KW_OUT:
                        case KW_OUTER:
                        case KW_OUTPUTDRIVER:
                        case KW_OUTPUTFORMAT:
                        case KW_OVERWRITE:
                        case KW_OWNER:
                        case KW_PARTITION:
                        case KW_PARTITIONED:
                        case KW_PARTITIONS:
                        case KW_PERCENT:
                        case KW_PLUS:
                        case KW_PRETTY:
                        case KW_PRINCIPALS:
                        case KW_PROCEDURE:
                        case KW_PROTECTION:
                        case KW_PURGE:
                        case KW_RANGE:
                        case KW_READ:
                        case KW_READONLY:
                        case KW_READS:
                        case KW_REBUILD:
                        case KW_RECORDREADER:
                        case KW_RECORDWRITER:
                        case KW_REGEXP:
                        case KW_RENAME:
                        case KW_REPAIR:
                        case KW_REPLACE:
                        case KW_RESTRICT:
                        case KW_REVOKE:
                        case KW_REWRITE:
                        case KW_RIGHT:
                        case KW_RLIKE:
                        case KW_ROLE:
                        case KW_ROLES:
                        case KW_ROLLUP:
                        case KW_ROW:
                        case KW_ROWS:
                        case KW_SCHEMA:
                        case KW_SCHEMAS:
                        case KW_SEMI:
                        case KW_SERDE:
                        case KW_SERDEPROPERTIES:
                        case KW_SET:
                        case KW_SETS:
                        case KW_SHARED:
                        case KW_SHOW:
                        case KW_SHOW_DATABASE:
                        case KW_SKEWED:
                        case KW_SMALLINT:
                        case KW_SORT:
                        case KW_SORTED:
                        case KW_SSL:
                        case KW_STATISTICS:
                        case KW_STORED:
                        case KW_STREAMTABLE:
                        case KW_STRING:
                        case KW_STRUCT:
                        case KW_TABLE:
                        case KW_TABLES:
                        case KW_TBLPROPERTIES:
                        case KW_TEMPORARY:
                        case KW_TERMINATED:
                        case KW_TIMESTAMP:
                        case KW_TINYINT:
                        case KW_TO:
                        case KW_TOUCH:
                        case KW_TRANSACTIONS:
                        case KW_TRIGGER:
                        case KW_TRUE:
                        case KW_TRUNCATE:
                        case KW_UNARCHIVE:
                        case KW_UNDO:
                        case KW_UNION:
                        case KW_UNIONTYPE:
                        case KW_UNLOCK:
                        case KW_UNSET:
                        case KW_UNSIGNED:
                        case KW_UPDATE:
                        case KW_USE:
                        case KW_USER:
                        case KW_USING:
                        case KW_UTC:
                        case KW_UTCTIMESTAMP:
                        case KW_VALUES:
                        case KW_VALUE_TYPE:
                        case KW_VIEW:
                        case KW_WHILE:
                        case KW_WITH:
                            {
                            alt145=1;
                            }
                            break;
                    }

                    }
                    break;
            }

            switch (alt145) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1423:33: KW_ROLE
                    {
                    KW_ROLE490=(Token)match(input,KW_ROLE,FOLLOW_KW_ROLE_in_revokeRole7655);  
                    stream_KW_ROLE.add(KW_ROLE490);


                    }
                    break;

            }


            pushFollow(FOLLOW_identifier_in_revokeRole7658);
            identifier491=identifier();

            state._fsp--;

            stream_identifier.add(identifier491.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1423:53: ( COMMA identifier )*
            loop146:
            do {
                int alt146=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt146=1;
                    }
                    break;

                }

                switch (alt146) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1423:54: COMMA identifier
                    {
                    COMMA492=(Token)match(input,COMMA,FOLLOW_COMMA_in_revokeRole7661);
                    stream_COMMA.add(COMMA492);


                    pushFollow(FOLLOW_identifier_in_revokeRole7663);
                    identifier493=identifier();

                    state._fsp--;

                    stream_identifier.add(identifier493.getTree());

                    }
                    break;

                default :
                    break loop146;
                }
            } while (true);


            KW_FROM494=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_revokeRole7667);  
            stream_KW_FROM.add(KW_FROM494);


            pushFollow(FOLLOW_principalSpecification_in_revokeRole7669);
            principalSpecification495=principalSpecification();

            state._fsp--;

            stream_principalSpecification.add(principalSpecification495.getTree());

            // AST REWRITE
            // elements: principalSpecification, identifier, adminOptionFor
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1424:5: -> ^( TOK_REVOKE_ROLE principalSpecification ( adminOptionFor )? ( identifier )+ )
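            // Illustrative example (hypothetical names): REVOKE ROLE role_a, role_b FROM USER some_user
            // would be rewritten to ^(TOK_REVOKE_ROLE <principalSpecification> role_a role_b),
            // with the adminOptionFor subtree added only when that optional phrase was matched.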
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1424:8: ^( TOK_REVOKE_ROLE principalSpecification ( adminOptionFor )? ( identifier )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_REVOKE_ROLE, "TOK_REVOKE_ROLE")
                , root_1);

                adaptor.addChild(root_1, stream_principalSpecification.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1424:49: ( adminOptionFor )?
                if ( stream_adminOptionFor.hasNext() ) {
                    adaptor.addChild(root_1, stream_adminOptionFor.nextTree());

                }
                stream_adminOptionFor.reset();

                if ( !(stream_identifier.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_identifier.hasNext() ) {
                    adaptor.addChild(root_1, stream_identifier.nextTree());

                }
                stream_identifier.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "revokeRole"


    public static class showRoleGrants_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "showRoleGrants"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1427:1: showRoleGrants : KW_SHOW KW_ROLE KW_GRANT principalName -> ^( TOK_SHOW_ROLE_GRANT principalName ) ;
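    // Illustrative example (hypothetical principal): SHOW ROLE GRANT USER some_user
    // is rewritten to ^(TOK_SHOW_ROLE_GRANT principalName).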
    public final HiveParser.showRoleGrants_return showRoleGrants() throws RecognitionException {
        HiveParser.showRoleGrants_return retval = new HiveParser.showRoleGrants_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_SHOW496=null;
        Token KW_ROLE497=null;
        Token KW_GRANT498=null;
        HiveParser.principalName_return principalName499 =null;


        CommonTree KW_SHOW496_tree=null;
        CommonTree KW_ROLE497_tree=null;
        CommonTree KW_GRANT498_tree=null;
        RewriteRuleTokenStream stream_KW_SHOW=new RewriteRuleTokenStream(adaptor,"token KW_SHOW");
        RewriteRuleTokenStream stream_KW_GRANT=new RewriteRuleTokenStream(adaptor,"token KW_GRANT");
        RewriteRuleTokenStream stream_KW_ROLE=new RewriteRuleTokenStream(adaptor,"token KW_ROLE");
        RewriteRuleSubtreeStream stream_principalName=new RewriteRuleSubtreeStream(adaptor,"rule principalName");
        pushMsg("show role grants", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1430:5: ( KW_SHOW KW_ROLE KW_GRANT principalName -> ^( TOK_SHOW_ROLE_GRANT principalName ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1430:7: KW_SHOW KW_ROLE KW_GRANT principalName
            {
            KW_SHOW496=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showRoleGrants7714);  
            stream_KW_SHOW.add(KW_SHOW496);


            KW_ROLE497=(Token)match(input,KW_ROLE,FOLLOW_KW_ROLE_in_showRoleGrants7716);  
            stream_KW_ROLE.add(KW_ROLE497);


            KW_GRANT498=(Token)match(input,KW_GRANT,FOLLOW_KW_GRANT_in_showRoleGrants7718);  
            stream_KW_GRANT.add(KW_GRANT498);


            pushFollow(FOLLOW_principalName_in_showRoleGrants7720);
            principalName499=principalName();

            state._fsp--;

            stream_principalName.add(principalName499.getTree());

            // AST REWRITE
            // elements: principalName
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1431:5: -> ^( TOK_SHOW_ROLE_GRANT principalName )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1431:8: ^( TOK_SHOW_ROLE_GRANT principalName )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SHOW_ROLE_GRANT, "TOK_SHOW_ROLE_GRANT")
                , root_1);

                adaptor.addChild(root_1, stream_principalName.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "showRoleGrants"


    public static class showRoles_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "showRoles"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1435:1: showRoles : KW_SHOW KW_ROLES -> ^( TOK_SHOW_ROLES ) ;
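    // Illustrative example: SHOW ROLES matches this rule and is rewritten to a bare
    // ^(TOK_SHOW_ROLES) node with no children.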
    public final HiveParser.showRoles_return showRoles() throws RecognitionException {
        HiveParser.showRoles_return retval = new HiveParser.showRoles_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_SHOW500=null;
        Token KW_ROLES501=null;

        CommonTree KW_SHOW500_tree=null;
        CommonTree KW_ROLES501_tree=null;
        RewriteRuleTokenStream stream_KW_SHOW=new RewriteRuleTokenStream(adaptor,"token KW_SHOW");
        RewriteRuleTokenStream stream_KW_ROLES=new RewriteRuleTokenStream(adaptor,"token KW_ROLES");

        pushMsg("show roles", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1438:5: ( KW_SHOW KW_ROLES -> ^( TOK_SHOW_ROLES ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1438:7: KW_SHOW KW_ROLES
            {
            KW_SHOW500=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showRoles7760);  
            stream_KW_SHOW.add(KW_SHOW500);


            KW_ROLES501=(Token)match(input,KW_ROLES,FOLLOW_KW_ROLES_in_showRoles7762);  
            stream_KW_ROLES.add(KW_ROLES501);


            // AST REWRITE
            // elements: 
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1439:5: -> ^( TOK_SHOW_ROLES )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1439:8: ^( TOK_SHOW_ROLES )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SHOW_ROLES, "TOK_SHOW_ROLES")
                , root_1);

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "showRoles"


    public static class showCurrentRole_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "showCurrentRole"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1442:1: showCurrentRole : KW_SHOW KW_CURRENT KW_ROLES -> ^( TOK_SHOW_SET_ROLE ) ;
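    // Illustrative example: SHOW CURRENT ROLES matches this rule; note that it is rewritten
    // to ^(TOK_SHOW_SET_ROLE), the same token type produced by setRole below.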
    public final HiveParser.showCurrentRole_return showCurrentRole() throws RecognitionException {
        HiveParser.showCurrentRole_return retval = new HiveParser.showCurrentRole_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_SHOW502=null;
        Token KW_CURRENT503=null;
        Token KW_ROLES504=null;

        CommonTree KW_SHOW502_tree=null;
        CommonTree KW_CURRENT503_tree=null;
        CommonTree KW_ROLES504_tree=null;
        RewriteRuleTokenStream stream_KW_SHOW=new RewriteRuleTokenStream(adaptor,"token KW_SHOW");
        RewriteRuleTokenStream stream_KW_CURRENT=new RewriteRuleTokenStream(adaptor,"token KW_CURRENT");
        RewriteRuleTokenStream stream_KW_ROLES=new RewriteRuleTokenStream(adaptor,"token KW_ROLES");

        pushMsg("show current role", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1445:5: ( KW_SHOW KW_CURRENT KW_ROLES -> ^( TOK_SHOW_SET_ROLE ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1445:7: KW_SHOW KW_CURRENT KW_ROLES
            {
            KW_SHOW502=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showCurrentRole7799);  
            stream_KW_SHOW.add(KW_SHOW502);


            KW_CURRENT503=(Token)match(input,KW_CURRENT,FOLLOW_KW_CURRENT_in_showCurrentRole7801);  
            stream_KW_CURRENT.add(KW_CURRENT503);


            KW_ROLES504=(Token)match(input,KW_ROLES,FOLLOW_KW_ROLES_in_showCurrentRole7803);  
            stream_KW_ROLES.add(KW_ROLES504);


            // AST REWRITE
            // elements: 
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1446:5: -> ^( TOK_SHOW_SET_ROLE )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1446:8: ^( TOK_SHOW_SET_ROLE )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SHOW_SET_ROLE, "TOK_SHOW_SET_ROLE")
                , root_1);

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "showCurrentRole"


    public static class setRole_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "setRole"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1449:1: setRole : KW_SET KW_ROLE roleName= identifier -> ^( TOK_SHOW_SET_ROLE $roleName) ;
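    // Illustrative example (hypothetical role name): SET ROLE admin_role is rewritten
    // to ^(TOK_SHOW_SET_ROLE admin_role).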
    public final HiveParser.setRole_return setRole() throws RecognitionException {
        HiveParser.setRole_return retval = new HiveParser.setRole_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_SET505=null;
        Token KW_ROLE506=null;
        HiveParser_IdentifiersParser.identifier_return roleName =null;


        CommonTree KW_SET505_tree=null;
        CommonTree KW_ROLE506_tree=null;
        RewriteRuleTokenStream stream_KW_ROLE=new RewriteRuleTokenStream(adaptor,"token KW_ROLE");
        RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
        pushMsg("set role", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1452:5: ( KW_SET KW_ROLE roleName= identifier -> ^( TOK_SHOW_SET_ROLE $roleName) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1452:7: KW_SET KW_ROLE roleName= identifier
            {
            KW_SET505=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_setRole7840);  
            stream_KW_SET.add(KW_SET505);


            KW_ROLE506=(Token)match(input,KW_ROLE,FOLLOW_KW_ROLE_in_setRole7842);  
            stream_KW_ROLE.add(KW_ROLE506);


            pushFollow(FOLLOW_identifier_in_setRole7846);
            roleName=identifier();

            state._fsp--;

            stream_identifier.add(roleName.getTree());

            // AST REWRITE
            // elements: roleName
            // token labels: 
            // rule labels: retval, roleName
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_roleName=new RewriteRuleSubtreeStream(adaptor,"rule roleName",roleName!=null?roleName.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1453:5: -> ^( TOK_SHOW_SET_ROLE $roleName)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1453:8: ^( TOK_SHOW_SET_ROLE $roleName)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SHOW_SET_ROLE, "TOK_SHOW_SET_ROLE")
                , root_1);

                adaptor.addChild(root_1, stream_roleName.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "setRole"


    public static class showGrants_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "showGrants"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1456:1: showGrants : KW_SHOW KW_GRANT ( principalName )? ( KW_ON privilegeIncludeColObject )? -> ^( TOK_SHOW_GRANT ( principalName )? ( privilegeIncludeColObject )? ) ;
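    // Illustrative examples (hypothetical names): SHOW GRANT, SHOW GRANT USER some_user,
    // and SHOW GRANT USER some_user ON TABLE some_table all match this rule; the principal
    // and the ON object are each optional children of ^(TOK_SHOW_GRANT).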
    public final HiveParser.showGrants_return showGrants() throws RecognitionException {
        HiveParser.showGrants_return retval = new HiveParser.showGrants_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_SHOW507=null;
        Token KW_GRANT508=null;
        Token KW_ON510=null;
        HiveParser.principalName_return principalName509 =null;

        HiveParser.privilegeIncludeColObject_return privilegeIncludeColObject511 =null;


        CommonTree KW_SHOW507_tree=null;
        CommonTree KW_GRANT508_tree=null;
        CommonTree KW_ON510_tree=null;
        RewriteRuleTokenStream stream_KW_SHOW=new RewriteRuleTokenStream(adaptor,"token KW_SHOW");
        RewriteRuleTokenStream stream_KW_GRANT=new RewriteRuleTokenStream(adaptor,"token KW_GRANT");
        RewriteRuleTokenStream stream_KW_ON=new RewriteRuleTokenStream(adaptor,"token KW_ON");
        RewriteRuleSubtreeStream stream_principalName=new RewriteRuleSubtreeStream(adaptor,"rule principalName");
        RewriteRuleSubtreeStream stream_privilegeIncludeColObject=new RewriteRuleSubtreeStream(adaptor,"rule privilegeIncludeColObject");
        pushMsg("show grants", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1459:5: ( KW_SHOW KW_GRANT ( principalName )? ( KW_ON privilegeIncludeColObject )? -> ^( TOK_SHOW_GRANT ( principalName )? ( privilegeIncludeColObject )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1459:7: KW_SHOW KW_GRANT ( principalName )? ( KW_ON privilegeIncludeColObject )?
            {
            KW_SHOW507=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showGrants7886);  
            stream_KW_SHOW.add(KW_SHOW507);


            KW_GRANT508=(Token)match(input,KW_GRANT,FOLLOW_KW_GRANT_in_showGrants7888);  
            stream_KW_GRANT.add(KW_GRANT508);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1459:24: ( principalName )?
            int alt147=2;
            switch ( input.LA(1) ) {
                case KW_GROUP:
                case KW_ROLE:
                case KW_USER:
                    {
                    alt147=1;
                    }
                    break;
            }

            switch (alt147) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1459:24: principalName
                    {
                    pushFollow(FOLLOW_principalName_in_showGrants7890);
                    principalName509=principalName();

                    state._fsp--;

                    stream_principalName.add(principalName509.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1459:39: ( KW_ON privilegeIncludeColObject )?
            int alt148=2;
            switch ( input.LA(1) ) {
                case KW_ON:
                    {
                    alt148=1;
                    }
                    break;
            }

            switch (alt148) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1459:40: KW_ON privilegeIncludeColObject
                    {
                    KW_ON510=(Token)match(input,KW_ON,FOLLOW_KW_ON_in_showGrants7894);  
                    stream_KW_ON.add(KW_ON510);


                    pushFollow(FOLLOW_privilegeIncludeColObject_in_showGrants7896);
                    privilegeIncludeColObject511=privilegeIncludeColObject();

                    state._fsp--;

                    stream_privilegeIncludeColObject.add(privilegeIncludeColObject511.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: principalName, privilegeIncludeColObject
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1460:5: -> ^( TOK_SHOW_GRANT ( principalName )? ( privilegeIncludeColObject )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1460:8: ^( TOK_SHOW_GRANT ( principalName )? ( privilegeIncludeColObject )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SHOW_GRANT, "TOK_SHOW_GRANT")
                , root_1);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1460:25: ( principalName )?
                if ( stream_principalName.hasNext() ) {
                    adaptor.addChild(root_1, stream_principalName.nextTree());

                }
                stream_principalName.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1460:40: ( privilegeIncludeColObject )?
                if ( stream_privilegeIncludeColObject.hasNext() ) {
                    adaptor.addChild(root_1, stream_privilegeIncludeColObject.nextTree());

                }
                stream_privilegeIncludeColObject.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "showGrants"


    public static class showRolePrincipals_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "showRolePrincipals"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1463:1: showRolePrincipals : KW_SHOW KW_PRINCIPALS roleName= identifier -> ^( TOK_SHOW_ROLE_PRINCIPALS $roleName) ;
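    // Illustrative example (hypothetical role name): SHOW PRINCIPALS some_role is rewritten
    // to ^(TOK_SHOW_ROLE_PRINCIPALS some_role).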
    public final HiveParser.showRolePrincipals_return showRolePrincipals() throws RecognitionException {
        HiveParser.showRolePrincipals_return retval = new HiveParser.showRolePrincipals_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_SHOW512=null;
        Token KW_PRINCIPALS513=null;
        HiveParser_IdentifiersParser.identifier_return roleName =null;


        CommonTree KW_SHOW512_tree=null;
        CommonTree KW_PRINCIPALS513_tree=null;
        RewriteRuleTokenStream stream_KW_SHOW=new RewriteRuleTokenStream(adaptor,"token KW_SHOW");
        RewriteRuleTokenStream stream_KW_PRINCIPALS=new RewriteRuleTokenStream(adaptor,"token KW_PRINCIPALS");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
        pushMsg("show role principals", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1466:5: ( KW_SHOW KW_PRINCIPALS roleName= identifier -> ^( TOK_SHOW_ROLE_PRINCIPALS $roleName) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1466:7: KW_SHOW KW_PRINCIPALS roleName= identifier
            {
            KW_SHOW512=(Token)match(input,KW_SHOW,FOLLOW_KW_SHOW_in_showRolePrincipals7941);  
            stream_KW_SHOW.add(KW_SHOW512);


            KW_PRINCIPALS513=(Token)match(input,KW_PRINCIPALS,FOLLOW_KW_PRINCIPALS_in_showRolePrincipals7943);  
            stream_KW_PRINCIPALS.add(KW_PRINCIPALS513);


            pushFollow(FOLLOW_identifier_in_showRolePrincipals7947);
            roleName=identifier();

            state._fsp--;

            stream_identifier.add(roleName.getTree());

            // AST REWRITE
            // elements: roleName
            // token labels: 
            // rule labels: retval, roleName
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_roleName=new RewriteRuleSubtreeStream(adaptor,"rule roleName",roleName!=null?roleName.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1467:5: -> ^( TOK_SHOW_ROLE_PRINCIPALS $roleName)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1467:8: ^( TOK_SHOW_ROLE_PRINCIPALS $roleName)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SHOW_ROLE_PRINCIPALS, "TOK_SHOW_ROLE_PRINCIPALS")
                , root_1);

                adaptor.addChild(root_1, stream_roleName.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "showRolePrincipals"


    public static class privilegeIncludeColObject_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "privilegeIncludeColObject"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1471:1: privilegeIncludeColObject : ( KW_ALL -> ^( TOK_RESOURCE_ALL ) | privObjectCols -> ^( TOK_PRIV_OBJECT_COL privObjectCols ) );
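    // Note: the bare keyword ALL is rewritten to ^(TOK_RESOURCE_ALL); any concrete object
    // reference is instead wrapped in ^(TOK_PRIV_OBJECT_COL privObjectCols).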
    public final HiveParser.privilegeIncludeColObject_return privilegeIncludeColObject() throws RecognitionException {
        HiveParser.privilegeIncludeColObject_return retval = new HiveParser.privilegeIncludeColObject_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_ALL514=null;
        HiveParser.privObjectCols_return privObjectCols515 =null;


        CommonTree KW_ALL514_tree=null;
        RewriteRuleTokenStream stream_KW_ALL=new RewriteRuleTokenStream(adaptor,"token KW_ALL");
        RewriteRuleSubtreeStream stream_privObjectCols=new RewriteRuleSubtreeStream(adaptor,"rule privObjectCols");
        pushMsg("privilege object including columns", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1474:5: ( KW_ALL -> ^( TOK_RESOURCE_ALL ) | privObjectCols -> ^( TOK_PRIV_OBJECT_COL privObjectCols ) )
            int alt149=2;
            switch ( input.LA(1) ) {
            case KW_ALL:
                {
                alt149=1;
                }
                break;
            case Identifier:
            case KW_ADD:
            case KW_ADMIN:
            case KW_AFTER:
            case KW_ALTER:
            case KW_ANALYZE:
            case KW_ARCHIVE:
            case KW_ARRAY:
            case KW_AS:
            case KW_ASC:
            case KW_AUTHORIZATION:
            case KW_BEFORE:
            case KW_BETWEEN:
            case KW_BIGINT:
            case KW_BINARY:
            case KW_BOOLEAN:
            case KW_BOTH:
            case KW_BUCKET:
            case KW_BUCKETS:
            case KW_BY:
            case KW_CASCADE:
            case KW_CHANGE:
            case KW_CLUSTER:
            case KW_CLUSTERED:
            case KW_CLUSTERSTATUS:
            case KW_COLLECTION:
            case KW_COLUMNS:
            case KW_COMMENT:
            case KW_COMPACT:
            case KW_COMPACTIONS:
            case KW_COMPUTE:
            case KW_CONCATENATE:
            case KW_CONTINUE:
            case KW_CREATE:
            case KW_CUBE:
            case KW_CURSOR:
            case KW_DATA:
            case KW_DATABASE:
            case KW_DATABASES:
            case KW_DATE:
            case KW_DATETIME:
            case KW_DBPROPERTIES:
            case KW_DECIMAL:
            case KW_DEFAULT:
            case KW_DEFERRED:
            case KW_DEFINED:
            case KW_DELETE:
            case KW_DELIMITED:
            case KW_DEPENDENCY:
            case KW_DESC:
            case KW_DESCRIBE:
            case KW_DIRECTORIES:
            case KW_DIRECTORY:
            case KW_DISABLE:
            case KW_DISTRIBUTE:
            case KW_DOUBLE:
            case KW_DROP:
            case KW_ELEM_TYPE:
            case KW_ENABLE:
            case KW_ESCAPED:
            case KW_EXCLUSIVE:
            case KW_EXISTS:
            case KW_EXPLAIN:
            case KW_EXPORT:
            case KW_EXTERNAL:
            case KW_FALSE:
            case KW_FETCH:
            case KW_FIELDS:
            case KW_FILE:
            case KW_FILEFORMAT:
            case KW_FIRST:
            case KW_FLOAT:
            case KW_FOR:
            case KW_FORMAT:
            case KW_FORMATTED:
            case KW_FULL:
            case KW_FUNCTIONS:
            case KW_GRANT:
            case KW_GROUP:
            case KW_GROUPING:
            case KW_HOLD_DDLTIME:
            case KW_IDXPROPERTIES:
            case KW_IGNORE:
            case KW_IMPORT:
            case KW_IN:
            case KW_INDEX:
            case KW_INDEXES:
            case KW_INNER:
            case KW_INPATH:
            case KW_INPUTDRIVER:
            case KW_INPUTFORMAT:
            case KW_INSERT:
            case KW_INT:
            case KW_INTERSECT:
            case KW_INTO:
            case KW_IS:
            case KW_ITEMS:
            case KW_JAR:
            case KW_KEYS:
            case KW_KEY_TYPE:
            case KW_LATERAL:
            case KW_LEFT:
            case KW_LIKE:
            case KW_LIMIT:
            case KW_LINES:
            case KW_LOAD:
            case KW_LOCAL:
            case KW_LOCATION:
            case KW_LOCK:
            case KW_LOCKS:
            case KW_LOGICAL:
            case KW_LONG:
            case KW_MAPJOIN:
            case KW_MATERIALIZED:
            case KW_MINUS:
            case KW_MSCK:
            case KW_NONE:
            case KW_NOSCAN:
            case KW_NO_DROP:
            case KW_NULL:
            case KW_OF:
            case KW_OFFLINE:
            case KW_OPTION:
            case KW_ORDER:
            case KW_OUT:
            case KW_OUTER:
            case KW_OUTPUTDRIVER:
            case KW_OUTPUTFORMAT:
            case KW_OVERWRITE:
            case KW_OWNER:
            case KW_PARTITION:
            case KW_PARTITIONED:
            case KW_PARTITIONS:
            case KW_PERCENT:
            case KW_PLUS:
            case KW_PRETTY:
            case KW_PRINCIPALS:
            case KW_PROCEDURE:
            case KW_PROTECTION:
            case KW_PURGE:
            case KW_RANGE:
            case KW_READ:
            case KW_READONLY:
            case KW_READS:
            case KW_REBUILD:
            case KW_RECORDREADER:
            case KW_RECORDWRITER:
            case KW_REGEXP:
            case KW_RENAME:
            case KW_REPAIR:
            case KW_REPLACE:
            case KW_RESTRICT:
            case KW_REVOKE:
            case KW_REWRITE:
            case KW_RIGHT:
            case KW_RLIKE:
            case KW_ROLE:
            case KW_ROLES:
            case KW_ROLLUP:
            case KW_ROW:
            case KW_ROWS:
            case KW_SCHEMA:
            case KW_SCHEMAS:
            case KW_SEMI:
            case KW_SERDE:
            case KW_SERDEPROPERTIES:
            case KW_SET:
            case KW_SETS:
            case KW_SHARED:
            case KW_SHOW:
            case KW_SHOW_DATABASE:
            case KW_SKEWED:
            case KW_SMALLINT:
            case KW_SORT:
            case KW_SORTED:
            case KW_SSL:
            case KW_STATISTICS:
            case KW_STORED:
            case KW_STREAMTABLE:
            case KW_STRING:
            case KW_STRUCT:
            case KW_TABLE:
            case KW_TABLES:
            case KW_TBLPROPERTIES:
            case KW_TEMPORARY:
            case KW_TERMINATED:
            case KW_TIMESTAMP:
            case KW_TINYINT:
            case KW_TO:
            case KW_TOUCH:
            case KW_TRANSACTIONS:
            case KW_TRIGGER:
            case KW_TRUE:
            case KW_TRUNCATE:
            case KW_UNARCHIVE:
            case KW_UNDO:
            case KW_UNION:
            case KW_UNIONTYPE:
            case KW_UNLOCK:
            case KW_UNSET:
            case KW_UNSIGNED:
            case KW_UPDATE:
            case KW_USE:
            case KW_USER:
            case KW_USING:
            case KW_UTC:
            case KW_UTCTIMESTAMP:
            case KW_VALUES:
            case KW_VALUE_TYPE:
            case KW_VIEW:
            case KW_WHILE:
            case KW_WITH:
                {
                alt149=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 149, 0, input);

                throw nvae;

            }

            switch (alt149) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1474:7: KW_ALL
                    {
                    KW_ALL514=(Token)match(input,KW_ALL,FOLLOW_KW_ALL_in_privilegeIncludeColObject7988);  
                    stream_KW_ALL.add(KW_ALL514);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1474:14: -> ^( TOK_RESOURCE_ALL )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1474:17: ^( TOK_RESOURCE_ALL )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_RESOURCE_ALL, "TOK_RESOURCE_ALL")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1475:7: privObjectCols
                    {
                    pushFollow(FOLLOW_privObjectCols_in_privilegeIncludeColObject8002);
                    privObjectCols515=privObjectCols();

                    state._fsp--;

                    stream_privObjectCols.add(privObjectCols515.getTree());

                    // AST REWRITE
                    // elements: privObjectCols
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1475:22: -> ^( TOK_PRIV_OBJECT_COL privObjectCols )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1475:25: ^( TOK_PRIV_OBJECT_COL privObjectCols )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_PRIV_OBJECT_COL, "TOK_PRIV_OBJECT_COL")
                        , root_1);

                        adaptor.addChild(root_1, stream_privObjectCols.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "privilegeIncludeColObject"


    public static class privilegeObject_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "privilegeObject"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1478:1: privilegeObject : KW_ON privObject -> ^( TOK_PRIV_OBJECT privObject ) ;
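    // Illustrative example (hypothetical table name): the clause ON TABLE some_table is
    // wrapped in ^(TOK_PRIV_OBJECT privObject).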
    public final HiveParser.privilegeObject_return privilegeObject() throws RecognitionException {
        HiveParser.privilegeObject_return retval = new HiveParser.privilegeObject_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_ON516=null;
        HiveParser.privObject_return privObject517 =null;


        CommonTree KW_ON516_tree=null;
        RewriteRuleTokenStream stream_KW_ON=new RewriteRuleTokenStream(adaptor,"token KW_ON");
        RewriteRuleSubtreeStream stream_privObject=new RewriteRuleSubtreeStream(adaptor,"rule privObject");
        pushMsg("privilege object", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1481:5: ( KW_ON privObject -> ^( TOK_PRIV_OBJECT privObject ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1481:7: KW_ON privObject
            {
            KW_ON516=(Token)match(input,KW_ON,FOLLOW_KW_ON_in_privilegeObject8037);  
            stream_KW_ON.add(KW_ON516);


            pushFollow(FOLLOW_privObject_in_privilegeObject8039);
            privObject517=privObject();

            state._fsp--;

            stream_privObject.add(privObject517.getTree());

            // AST REWRITE
            // elements: privObject
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1481:24: -> ^( TOK_PRIV_OBJECT privObject )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1481:27: ^( TOK_PRIV_OBJECT privObject )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_PRIV_OBJECT, "TOK_PRIV_OBJECT")
                , root_1);

                adaptor.addChild(root_1, stream_privObject.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "privilegeObject"


    public static class privObject_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "privObject"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1485:1: privObject : ( ( KW_DATABASE | KW_SCHEMA ) identifier -> ^( TOK_DB_TYPE identifier ) | ( KW_TABLE )? tableName ( partitionSpec )? -> ^( TOK_TABLE_TYPE tableName ( partitionSpec )? ) );
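    // Note: the DATABASE/SCHEMA form is rewritten to ^(TOK_DB_TYPE identifier), while the
    // (optionally TABLE-prefixed) table form is rewritten to
    // ^(TOK_TABLE_TYPE tableName (partitionSpec)?).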
    public final HiveParser.privObject_return privObject() throws RecognitionException {
        HiveParser.privObject_return retval = new HiveParser.privObject_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_DATABASE518=null;
        Token KW_SCHEMA519=null;
        Token KW_TABLE521=null;
        HiveParser_IdentifiersParser.identifier_return identifier520 =null;

        HiveParser_FromClauseParser.tableName_return tableName522 =null;

        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec523 =null;


        CommonTree KW_DATABASE518_tree=null;
        CommonTree KW_SCHEMA519_tree=null;
        CommonTree KW_TABLE521_tree=null;
        RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
        RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
        RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1486:5: ( ( KW_DATABASE | KW_SCHEMA ) identifier -> ^( TOK_DB_TYPE identifier ) | ( KW_TABLE )? tableName ( partitionSpec )? -> ^( TOK_TABLE_TYPE tableName ( partitionSpec )? ) )
            int alt153=2;
            switch ( input.LA(1) ) {
            case KW_DATABASE:
                {
                alt153=1;
                }
                break;
            case KW_SCHEMA:
                {
                switch ( input.LA(2) ) {
                case Identifier:
                case KW_ADD:
                case KW_ADMIN:
                case KW_AFTER:
                case KW_ALL:
                case KW_ALTER:
                case KW_ANALYZE:
                case KW_ARCHIVE:
                case KW_ARRAY:
                case KW_AS:
                case KW_ASC:
                case KW_AUTHORIZATION:
                case KW_BEFORE:
                case KW_BETWEEN:
                case KW_BIGINT:
                case KW_BINARY:
                case KW_BOOLEAN:
                case KW_BOTH:
                case KW_BUCKET:
                case KW_BUCKETS:
                case KW_BY:
                case KW_CASCADE:
                case KW_CHANGE:
                case KW_CLUSTER:
                case KW_CLUSTERED:
                case KW_CLUSTERSTATUS:
                case KW_COLLECTION:
                case KW_COLUMNS:
                case KW_COMMENT:
                case KW_COMPACT:
                case KW_COMPACTIONS:
                case KW_COMPUTE:
                case KW_CONCATENATE:
                case KW_CONTINUE:
                case KW_CREATE:
                case KW_CUBE:
                case KW_CURSOR:
                case KW_DATA:
                case KW_DATABASES:
                case KW_DATE:
                case KW_DATETIME:
                case KW_DBPROPERTIES:
                case KW_DECIMAL:
                case KW_DEFAULT:
                case KW_DEFERRED:
                case KW_DEFINED:
                case KW_DELETE:
                case KW_DELIMITED:
                case KW_DEPENDENCY:
                case KW_DESC:
                case KW_DESCRIBE:
                case KW_DIRECTORIES:
                case KW_DIRECTORY:
                case KW_DISABLE:
                case KW_DISTRIBUTE:
                case KW_DOUBLE:
                case KW_DROP:
                case KW_ELEM_TYPE:
                case KW_ENABLE:
                case KW_ESCAPED:
                case KW_EXCLUSIVE:
                case KW_EXISTS:
                case KW_EXPLAIN:
                case KW_EXPORT:
                case KW_EXTERNAL:
                case KW_FALSE:
                case KW_FETCH:
                case KW_FIELDS:
                case KW_FILE:
                case KW_FILEFORMAT:
                case KW_FIRST:
                case KW_FLOAT:
                case KW_FOR:
                case KW_FORMAT:
                case KW_FORMATTED:
                case KW_FULL:
                case KW_FUNCTIONS:
                case KW_GRANT:
                case KW_GROUP:
                case KW_GROUPING:
                case KW_HOLD_DDLTIME:
                case KW_IDXPROPERTIES:
                case KW_IGNORE:
                case KW_IMPORT:
                case KW_IN:
                case KW_INDEX:
                case KW_INDEXES:
                case KW_INNER:
                case KW_INPATH:
                case KW_INPUTDRIVER:
                case KW_INPUTFORMAT:
                case KW_INSERT:
                case KW_INT:
                case KW_INTERSECT:
                case KW_INTO:
                case KW_IS:
                case KW_ITEMS:
                case KW_JAR:
                case KW_KEYS:
                case KW_KEY_TYPE:
                case KW_LATERAL:
                case KW_LEFT:
                case KW_LIKE:
                case KW_LIMIT:
                case KW_LINES:
                case KW_LOAD:
                case KW_LOCAL:
                case KW_LOCATION:
                case KW_LOCK:
                case KW_LOCKS:
                case KW_LOGICAL:
                case KW_LONG:
                case KW_MAPJOIN:
                case KW_MATERIALIZED:
                case KW_MINUS:
                case KW_MSCK:
                case KW_NONE:
                case KW_NOSCAN:
                case KW_NO_DROP:
                case KW_NULL:
                case KW_OF:
                case KW_OFFLINE:
                case KW_OPTION:
                case KW_ORDER:
                case KW_OUT:
                case KW_OUTER:
                case KW_OUTPUTDRIVER:
                case KW_OUTPUTFORMAT:
                case KW_OVERWRITE:
                case KW_OWNER:
                case KW_PARTITIONED:
                case KW_PARTITIONS:
                case KW_PERCENT:
                case KW_PLUS:
                case KW_PRETTY:
                case KW_PRINCIPALS:
                case KW_PROCEDURE:
                case KW_PROTECTION:
                case KW_PURGE:
                case KW_RANGE:
                case KW_READ:
                case KW_READONLY:
                case KW_READS:
                case KW_REBUILD:
                case KW_RECORDREADER:
                case KW_RECORDWRITER:
                case KW_REGEXP:
                case KW_RENAME:
                case KW_REPAIR:
                case KW_REPLACE:
                case KW_RESTRICT:
                case KW_REVOKE:
                case KW_REWRITE:
                case KW_RIGHT:
                case KW_RLIKE:
                case KW_ROLE:
                case KW_ROLES:
                case KW_ROLLUP:
                case KW_ROW:
                case KW_ROWS:
                case KW_SCHEMA:
                case KW_SCHEMAS:
                case KW_SEMI:
                case KW_SERDE:
                case KW_SERDEPROPERTIES:
                case KW_SET:
                case KW_SETS:
                case KW_SHARED:
                case KW_SHOW:
                case KW_SHOW_DATABASE:
                case KW_SKEWED:
                case KW_SMALLINT:
                case KW_SORT:
                case KW_SORTED:
                case KW_SSL:
                case KW_STATISTICS:
                case KW_STORED:
                case KW_STREAMTABLE:
                case KW_STRING:
                case KW_STRUCT:
                case KW_TABLE:
                case KW_TABLES:
                case KW_TBLPROPERTIES:
                case KW_TEMPORARY:
                case KW_TERMINATED:
                case KW_TIMESTAMP:
                case KW_TINYINT:
                case KW_TOUCH:
                case KW_TRANSACTIONS:
                case KW_TRIGGER:
                case KW_TRUE:
                case KW_TRUNCATE:
                case KW_UNARCHIVE:
                case KW_UNDO:
                case KW_UNION:
                case KW_UNIONTYPE:
                case KW_UNLOCK:
                case KW_UNSET:
                case KW_UNSIGNED:
                case KW_UPDATE:
                case KW_USE:
                case KW_USER:
                case KW_USING:
                case KW_UTC:
                case KW_UTCTIMESTAMP:
                case KW_VALUES:
                case KW_VALUE_TYPE:
                case KW_VIEW:
                case KW_WHILE:
                case KW_WITH:
                    {
                    alt153=1;
                    }
                    break;
                case KW_PARTITION:
                    {
                    switch ( input.LA(3) ) {
                    case LPAREN:
                        {
                        alt153=2;
                        }
                        break;
                    case KW_FROM:
                    case KW_TO:
                        {
                        alt153=1;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 153, 7, input);

                        throw nvae;

                    }

                    }
                    break;
                case DOT:
                case KW_FROM:
                    {
                    alt153=2;
                    }
                    break;
                case KW_TO:
                    {
                    switch ( input.LA(3) ) {
                    case KW_FROM:
                    case KW_TO:
                        {
                        alt153=1;
                        }
                        break;
                    case KW_GROUP:
                    case KW_ROLE:
                    case KW_USER:
                        {
                        alt153=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 153, 9, input);

                        throw nvae;

                    }

                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 153, 2, input);

                    throw nvae;

                }

                }
                break;
            case Identifier:
            case KW_ADD:
            case KW_ADMIN:
            case KW_AFTER:
            case KW_ALL:
            case KW_ALTER:
            case KW_ANALYZE:
            case KW_ARCHIVE:
            case KW_ARRAY:
            case KW_AS:
            case KW_ASC:
            case KW_AUTHORIZATION:
            case KW_BEFORE:
            case KW_BETWEEN:
            case KW_BIGINT:
            case KW_BINARY:
            case KW_BOOLEAN:
            case KW_BOTH:
            case KW_BUCKET:
            case KW_BUCKETS:
            case KW_BY:
            case KW_CASCADE:
            case KW_CHANGE:
            case KW_CLUSTER:
            case KW_CLUSTERED:
            case KW_CLUSTERSTATUS:
            case KW_COLLECTION:
            case KW_COLUMNS:
            case KW_COMMENT:
            case KW_COMPACT:
            case KW_COMPACTIONS:
            case KW_COMPUTE:
            case KW_CONCATENATE:
            case KW_CONTINUE:
            case KW_CREATE:
            case KW_CUBE:
            case KW_CURSOR:
            case KW_DATA:
            case KW_DATABASES:
            case KW_DATE:
            case KW_DATETIME:
            case KW_DBPROPERTIES:
            case KW_DECIMAL:
            case KW_DEFAULT:
            case KW_DEFERRED:
            case KW_DEFINED:
            case KW_DELETE:
            case KW_DELIMITED:
            case KW_DEPENDENCY:
            case KW_DESC:
            case KW_DESCRIBE:
            case KW_DIRECTORIES:
            case KW_DIRECTORY:
            case KW_DISABLE:
            case KW_DISTRIBUTE:
            case KW_DOUBLE:
            case KW_DROP:
            case KW_ELEM_TYPE:
            case KW_ENABLE:
            case KW_ESCAPED:
            case KW_EXCLUSIVE:
            case KW_EXISTS:
            case KW_EXPLAIN:
            case KW_EXPORT:
            case KW_EXTERNAL:
            case KW_FALSE:
            case KW_FETCH:
            case KW_FIELDS:
            case KW_FILE:
            case KW_FILEFORMAT:
            case KW_FIRST:
            case KW_FLOAT:
            case KW_FOR:
            case KW_FORMAT:
            case KW_FORMATTED:
            case KW_FULL:
            case KW_FUNCTIONS:
            case KW_GRANT:
            case KW_GROUP:
            case KW_GROUPING:
            case KW_HOLD_DDLTIME:
            case KW_IDXPROPERTIES:
            case KW_IGNORE:
            case KW_IMPORT:
            case KW_IN:
            case KW_INDEX:
            case KW_INDEXES:
            case KW_INNER:
            case KW_INPATH:
            case KW_INPUTDRIVER:
            case KW_INPUTFORMAT:
            case KW_INSERT:
            case KW_INT:
            case KW_INTERSECT:
            case KW_INTO:
            case KW_IS:
            case KW_ITEMS:
            case KW_JAR:
            case KW_KEYS:
            case KW_KEY_TYPE:
            case KW_LATERAL:
            case KW_LEFT:
            case KW_LIKE:
            case KW_LIMIT:
            case KW_LINES:
            case KW_LOAD:
            case KW_LOCAL:
            case KW_LOCATION:
            case KW_LOCK:
            case KW_LOCKS:
            case KW_LOGICAL:
            case KW_LONG:
            case KW_MAPJOIN:
            case KW_MATERIALIZED:
            case KW_MINUS:
            case KW_MSCK:
            case KW_NONE:
            case KW_NOSCAN:
            case KW_NO_DROP:
            case KW_NULL:
            case KW_OF:
            case KW_OFFLINE:
            case KW_OPTION:
            case KW_ORDER:
            case KW_OUT:
            case KW_OUTER:
            case KW_OUTPUTDRIVER:
            case KW_OUTPUTFORMAT:
            case KW_OVERWRITE:
            case KW_OWNER:
            case KW_PARTITION:
            case KW_PARTITIONED:
            case KW_PARTITIONS:
            case KW_PERCENT:
            case KW_PLUS:
            case KW_PRETTY:
            case KW_PRINCIPALS:
            case KW_PROCEDURE:
            case KW_PROTECTION:
            case KW_PURGE:
            case KW_RANGE:
            case KW_READ:
            case KW_READONLY:
            case KW_READS:
            case KW_REBUILD:
            case KW_RECORDREADER:
            case KW_RECORDWRITER:
            case KW_REGEXP:
            case KW_RENAME:
            case KW_REPAIR:
            case KW_REPLACE:
            case KW_RESTRICT:
            case KW_REVOKE:
            case KW_REWRITE:
            case KW_RIGHT:
            case KW_RLIKE:
            case KW_ROLE:
            case KW_ROLES:
            case KW_ROLLUP:
            case KW_ROW:
            case KW_ROWS:
            case KW_SCHEMAS:
            case KW_SEMI:
            case KW_SERDE:
            case KW_SERDEPROPERTIES:
            case KW_SET:
            case KW_SETS:
            case KW_SHARED:
            case KW_SHOW:
            case KW_SHOW_DATABASE:
            case KW_SKEWED:
            case KW_SMALLINT:
            case KW_SORT:
            case KW_SORTED:
            case KW_SSL:
            case KW_STATISTICS:
            case KW_STORED:
            case KW_STREAMTABLE:
            case KW_STRING:
            case KW_STRUCT:
            case KW_TABLE:
            case KW_TABLES:
            case KW_TBLPROPERTIES:
            case KW_TEMPORARY:
            case KW_TERMINATED:
            case KW_TIMESTAMP:
            case KW_TINYINT:
            case KW_TO:
            case KW_TOUCH:
            case KW_TRANSACTIONS:
            case KW_TRIGGER:
            case KW_TRUE:
            case KW_TRUNCATE:
            case KW_UNARCHIVE:
            case KW_UNDO:
            case KW_UNION:
            case KW_UNIONTYPE:
            case KW_UNLOCK:
            case KW_UNSET:
            case KW_UNSIGNED:
            case KW_UPDATE:
            case KW_USE:
            case KW_USER:
            case KW_USING:
            case KW_UTC:
            case KW_UTCTIMESTAMP:
            case KW_VALUES:
            case KW_VALUE_TYPE:
            case KW_VIEW:
            case KW_WHILE:
            case KW_WITH:
                {
                alt153=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 153, 0, input);

                throw nvae;

            }
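            // Decision 153 chooses between the two privObject alternatives declared at
            // HiveParser.g:1486-1487: alt153==1 parses "(KW_DATABASE | KW_SCHEMA) identifier",
            // alt153==2 parses "(KW_TABLE)? tableName (partitionSpec)?". The LA(2)/LA(3)
            // lookahead above is what disambiguates inputs where KW_SCHEMA could be either
            // the SCHEMA keyword or the (non-reserved) name of a table.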

            switch (alt153) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1486:7: ( KW_DATABASE | KW_SCHEMA ) identifier
                    {
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1486:7: ( KW_DATABASE | KW_SCHEMA )
                    int alt150=2;
                    switch ( input.LA(1) ) {
                    case KW_DATABASE:
                        {
                        alt150=1;
                        }
                        break;
                    case KW_SCHEMA:
                        {
                        alt150=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 150, 0, input);

                        throw nvae;

                    }

                    switch (alt150) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1486:8: KW_DATABASE
                            {
                            KW_DATABASE518=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_privObject8066);  
                            stream_KW_DATABASE.add(KW_DATABASE518);


                            }
                            break;
                        case 2 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1486:20: KW_SCHEMA
                            {
                            KW_SCHEMA519=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_privObject8068);  
                            stream_KW_SCHEMA.add(KW_SCHEMA519);


                            }
                            break;

                    }


                    pushFollow(FOLLOW_identifier_in_privObject8071);
                    identifier520=identifier();

                    state._fsp--;

                    stream_identifier.add(identifier520.getTree());

                    // AST REWRITE
                    // elements: identifier
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1486:42: -> ^( TOK_DB_TYPE identifier )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1486:45: ^( TOK_DB_TYPE identifier )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_DB_TYPE, "TOK_DB_TYPE")
                        , root_1);

                        adaptor.addChild(root_1, stream_identifier.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1487:7: ( KW_TABLE )? tableName ( partitionSpec )?
                    {
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1487:7: ( KW_TABLE )?
                    int alt151=2;
                    switch ( input.LA(1) ) {
                        case KW_TABLE:
                            {
                            switch ( input.LA(2) ) {
                                case Identifier:
                                case KW_ADD:
                                case KW_ADMIN:
                                case KW_AFTER:
                                case KW_ALL:
                                case KW_ALTER:
                                case KW_ANALYZE:
                                case KW_ARCHIVE:
                                case KW_ARRAY:
                                case KW_AS:
                                case KW_ASC:
                                case KW_AUTHORIZATION:
                                case KW_BEFORE:
                                case KW_BETWEEN:
                                case KW_BIGINT:
                                case KW_BINARY:
                                case KW_BOOLEAN:
                                case KW_BOTH:
                                case KW_BUCKET:
                                case KW_BUCKETS:
                                case KW_BY:
                                case KW_CASCADE:
                                case KW_CHANGE:
                                case KW_CLUSTER:
                                case KW_CLUSTERED:
                                case KW_CLUSTERSTATUS:
                                case KW_COLLECTION:
                                case KW_COLUMNS:
                                case KW_COMMENT:
                                case KW_COMPACT:
                                case KW_COMPACTIONS:
                                case KW_COMPUTE:
                                case KW_CONCATENATE:
                                case KW_CONTINUE:
                                case KW_CREATE:
                                case KW_CUBE:
                                case KW_CURSOR:
                                case KW_DATA:
                                case KW_DATABASES:
                                case KW_DATE:
                                case KW_DATETIME:
                                case KW_DBPROPERTIES:
                                case KW_DECIMAL:
                                case KW_DEFAULT:
                                case KW_DEFERRED:
                                case KW_DEFINED:
                                case KW_DELETE:
                                case KW_DELIMITED:
                                case KW_DEPENDENCY:
                                case KW_DESC:
                                case KW_DESCRIBE:
                                case KW_DIRECTORIES:
                                case KW_DIRECTORY:
                                case KW_DISABLE:
                                case KW_DISTRIBUTE:
                                case KW_DOUBLE:
                                case KW_DROP:
                                case KW_ELEM_TYPE:
                                case KW_ENABLE:
                                case KW_ESCAPED:
                                case KW_EXCLUSIVE:
                                case KW_EXISTS:
                                case KW_EXPLAIN:
                                case KW_EXPORT:
                                case KW_EXTERNAL:
                                case KW_FALSE:
                                case KW_FETCH:
                                case KW_FIELDS:
                                case KW_FILE:
                                case KW_FILEFORMAT:
                                case KW_FIRST:
                                case KW_FLOAT:
                                case KW_FOR:
                                case KW_FORMAT:
                                case KW_FORMATTED:
                                case KW_FULL:
                                case KW_FUNCTIONS:
                                case KW_GRANT:
                                case KW_GROUP:
                                case KW_GROUPING:
                                case KW_HOLD_DDLTIME:
                                case KW_IDXPROPERTIES:
                                case KW_IGNORE:
                                case KW_IMPORT:
                                case KW_IN:
                                case KW_INDEX:
                                case KW_INDEXES:
                                case KW_INNER:
                                case KW_INPATH:
                                case KW_INPUTDRIVER:
                                case KW_INPUTFORMAT:
                                case KW_INSERT:
                                case KW_INT:
                                case KW_INTERSECT:
                                case KW_INTO:
                                case KW_IS:
                                case KW_ITEMS:
                                case KW_JAR:
                                case KW_KEYS:
                                case KW_KEY_TYPE:
                                case KW_LATERAL:
                                case KW_LEFT:
                                case KW_LIKE:
                                case KW_LIMIT:
                                case KW_LINES:
                                case KW_LOAD:
                                case KW_LOCAL:
                                case KW_LOCATION:
                                case KW_LOCK:
                                case KW_LOCKS:
                                case KW_LOGICAL:
                                case KW_LONG:
                                case KW_MAPJOIN:
                                case KW_MATERIALIZED:
                                case KW_MINUS:
                                case KW_MSCK:
                                case KW_NONE:
                                case KW_NOSCAN:
                                case KW_NO_DROP:
                                case KW_NULL:
                                case KW_OF:
                                case KW_OFFLINE:
                                case KW_OPTION:
                                case KW_ORDER:
                                case KW_OUT:
                                case KW_OUTER:
                                case KW_OUTPUTDRIVER:
                                case KW_OUTPUTFORMAT:
                                case KW_OVERWRITE:
                                case KW_OWNER:
                                case KW_PARTITIONED:
                                case KW_PARTITIONS:
                                case KW_PERCENT:
                                case KW_PLUS:
                                case KW_PRETTY:
                                case KW_PRINCIPALS:
                                case KW_PROCEDURE:
                                case KW_PROTECTION:
                                case KW_PURGE:
                                case KW_RANGE:
                                case KW_READ:
                                case KW_READONLY:
                                case KW_READS:
                                case KW_REBUILD:
                                case KW_RECORDREADER:
                                case KW_RECORDWRITER:
                                case KW_REGEXP:
                                case KW_RENAME:
                                case KW_REPAIR:
                                case KW_REPLACE:
                                case KW_RESTRICT:
                                case KW_REVOKE:
                                case KW_REWRITE:
                                case KW_RIGHT:
                                case KW_RLIKE:
                                case KW_ROLE:
                                case KW_ROLES:
                                case KW_ROLLUP:
                                case KW_ROW:
                                case KW_ROWS:
                                case KW_SCHEMA:
                                case KW_SCHEMAS:
                                case KW_SEMI:
                                case KW_SERDE:
                                case KW_SERDEPROPERTIES:
                                case KW_SET:
                                case KW_SETS:
                                case KW_SHARED:
                                case KW_SHOW:
                                case KW_SHOW_DATABASE:
                                case KW_SKEWED:
                                case KW_SMALLINT:
                                case KW_SORT:
                                case KW_SORTED:
                                case KW_SSL:
                                case KW_STATISTICS:
                                case KW_STORED:
                                case KW_STREAMTABLE:
                                case KW_STRING:
                                case KW_STRUCT:
                                case KW_TABLE:
                                case KW_TABLES:
                                case KW_TBLPROPERTIES:
                                case KW_TEMPORARY:
                                case KW_TERMINATED:
                                case KW_TIMESTAMP:
                                case KW_TINYINT:
                                case KW_TOUCH:
                                case KW_TRANSACTIONS:
                                case KW_TRIGGER:
                                case KW_TRUE:
                                case KW_TRUNCATE:
                                case KW_UNARCHIVE:
                                case KW_UNDO:
                                case KW_UNION:
                                case KW_UNIONTYPE:
                                case KW_UNLOCK:
                                case KW_UNSET:
                                case KW_UNSIGNED:
                                case KW_UPDATE:
                                case KW_USE:
                                case KW_USER:
                                case KW_USING:
                                case KW_UTC:
                                case KW_UTCTIMESTAMP:
                                case KW_VALUES:
                                case KW_VALUE_TYPE:
                                case KW_VIEW:
                                case KW_WHILE:
                                case KW_WITH:
                                    {
                                    alt151=1;
                                    }
                                    break;
                                case KW_PARTITION:
                                    {
                                    switch ( input.LA(3) ) {
                                        case DOT:
                                        case KW_FROM:
                                        case KW_PARTITION:
                                        case KW_TO:
                                            {
                                            alt151=1;
                                            }
                                            break;
                                    }

                                    }
                                    break;
                                case KW_TO:
                                    {
                                    switch ( input.LA(3) ) {
                                        case DOT:
                                        case KW_FROM:
                                        case KW_PARTITION:
                                        case KW_TO:
                                            {
                                            alt151=1;
                                            }
                                            break;
                                    }

                                    }
                                    break;
                            }

                            }
                            break;
                    }
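                    // Decision 151: decide whether KW_TABLE here is the optional TABLE keyword
                    // (alt151==1) or should be left in the stream for tableName to consume,
                    // since TABLE also appears in tableName's first-token set above. When the
                    // lookahead is inconclusive, alt151 stays 2 and no token is consumed here.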

                    switch (alt151) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1487:7: KW_TABLE
                            {
                            KW_TABLE521=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_privObject8087);  
                            stream_KW_TABLE.add(KW_TABLE521);


                            }
                            break;

                    }


                    pushFollow(FOLLOW_tableName_in_privObject8090);
                    tableName522=tableName();

                    state._fsp--;

                    stream_tableName.add(tableName522.getTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1487:27: ( partitionSpec )?
                    int alt152=2;
                    switch ( input.LA(1) ) {
                        case KW_PARTITION:
                            {
                            alt152=1;
                            }
                            break;
                    }

                    switch (alt152) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1487:27: partitionSpec
                            {
                            pushFollow(FOLLOW_partitionSpec_in_privObject8092);
                            partitionSpec523=partitionSpec();

                            state._fsp--;

                            stream_partitionSpec.add(partitionSpec523.getTree());

                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: partitionSpec, tableName
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1487:42: -> ^( TOK_TABLE_TYPE tableName ( partitionSpec )? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1487:45: ^( TOK_TABLE_TYPE tableName ( partitionSpec )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_TABLE_TYPE, "TOK_TABLE_TYPE")
                        , root_1);

                        adaptor.addChild(root_1, stream_tableName.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1487:72: ( partitionSpec )?
                        if ( stream_partitionSpec.hasNext() ) {
                            adaptor.addChild(root_1, stream_partitionSpec.nextTree());

                        }
                        stream_partitionSpec.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "privObject"
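
    // Illustrative only (not part of the generated parser): a minimal sketch of how one
    // might exercise the privObject rule in isolation, assuming the companion generated
    // HiveLexer class and the ANTLR 3 runtime are on the classpath. Hive itself normally
    // drives parsing through org.apache.hadoop.hive.ql.parse.ParseDriver and the top-level
    // statement rule rather than calling privObject() directly.
    //
    //   CharStream cs = new ANTLRStringStream("TABLE DB.TBL PARTITION (DS='2015-05-14')");
    //   HiveLexer lexer = new HiveLexer(cs);
    //   CommonTokenStream tokens = new CommonTokenStream(lexer);
    //   HiveParser parser = new HiveParser(tokens);
    //   CommonTree tree = (CommonTree) parser.privObject().getTree();
    //   // expected shape, per the 1487:42 rewrite above:
    //   //   ^(TOK_TABLE_TYPE tableName partitionSpec?)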


    public static class privObjectCols_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "privObjectCols"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1490:1: privObjectCols : ( ( KW_DATABASE | KW_SCHEMA ) identifier -> ^( TOK_DB_TYPE identifier ) | ( KW_TABLE )? tableName ( LPAREN cols= columnNameList RPAREN )? ( partitionSpec )? -> ^( TOK_TABLE_TYPE tableName ( $cols)? ( partitionSpec )? ) );
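    // Compared to privObject above, this variant additionally allows an optional
    // parenthesized column list after the table name. Illustrative only (what the
    // enclosing grant/revoke rules ultimately accept is decided elsewhere): a fragment
    // such as "TBL (C1, C2) PARTITION (DS='1')" rewrites to a TOK_TABLE_TYPE tree whose
    // children are the tableName tree, the column-list tree, and the partitionSpec tree,
    // in that order.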
    public final HiveParser.privObjectCols_return privObjectCols() throws RecognitionException {
        HiveParser.privObjectCols_return retval = new HiveParser.privObjectCols_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_DATABASE524=null;
        Token KW_SCHEMA525=null;
        Token KW_TABLE527=null;
        Token LPAREN529=null;
        Token RPAREN530=null;
        HiveParser.columnNameList_return cols =null;

        HiveParser_IdentifiersParser.identifier_return identifier526 =null;

        HiveParser_FromClauseParser.tableName_return tableName528 =null;

        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec531 =null;


        CommonTree KW_DATABASE524_tree=null;
        CommonTree KW_SCHEMA525_tree=null;
        CommonTree KW_TABLE527_tree=null;
        CommonTree LPAREN529_tree=null;
        CommonTree RPAREN530_tree=null;
        RewriteRuleTokenStream stream_KW_SCHEMA=new RewriteRuleTokenStream(adaptor,"token KW_SCHEMA");
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_KW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_DATABASE");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleSubtreeStream stream_columnNameList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameList");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
        RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1491:5: ( ( KW_DATABASE | KW_SCHEMA ) identifier -> ^( TOK_DB_TYPE identifier ) | ( KW_TABLE )? tableName ( LPAREN cols= columnNameList RPAREN )? ( partitionSpec )? -> ^( TOK_TABLE_TYPE tableName ( $cols)? ( partitionSpec )? ) )
            int alt158=2;
            switch ( input.LA(1) ) {
            case KW_DATABASE:
                {
                alt158=1;
                }
                break;
            case KW_SCHEMA:
                {
                switch ( input.LA(2) ) {
                case Identifier:
                case KW_ADD:
                case KW_ADMIN:
                case KW_AFTER:
                case KW_ALL:
                case KW_ALTER:
                case KW_ANALYZE:
                case KW_ARCHIVE:
                case KW_ARRAY:
                case KW_AS:
                case KW_ASC:
                case KW_AUTHORIZATION:
                case KW_BEFORE:
                case KW_BETWEEN:
                case KW_BIGINT:
                case KW_BINARY:
                case KW_BOOLEAN:
                case KW_BOTH:
                case KW_BUCKET:
                case KW_BUCKETS:
                case KW_BY:
                case KW_CASCADE:
                case KW_CHANGE:
                case KW_CLUSTER:
                case KW_CLUSTERED:
                case KW_CLUSTERSTATUS:
                case KW_COLLECTION:
                case KW_COLUMNS:
                case KW_COMMENT:
                case KW_COMPACT:
                case KW_COMPACTIONS:
                case KW_COMPUTE:
                case KW_CONCATENATE:
                case KW_CONTINUE:
                case KW_CREATE:
                case KW_CUBE:
                case KW_CURSOR:
                case KW_DATA:
                case KW_DATABASES:
                case KW_DATE:
                case KW_DATETIME:
                case KW_DBPROPERTIES:
                case KW_DECIMAL:
                case KW_DEFAULT:
                case KW_DEFERRED:
                case KW_DEFINED:
                case KW_DELETE:
                case KW_DELIMITED:
                case KW_DEPENDENCY:
                case KW_DESC:
                case KW_DESCRIBE:
                case KW_DIRECTORIES:
                case KW_DIRECTORY:
                case KW_DISABLE:
                case KW_DISTRIBUTE:
                case KW_DOUBLE:
                case KW_DROP:
                case KW_ELEM_TYPE:
                case KW_ENABLE:
                case KW_ESCAPED:
                case KW_EXCLUSIVE:
                case KW_EXISTS:
                case KW_EXPLAIN:
                case KW_EXPORT:
                case KW_EXTERNAL:
                case KW_FALSE:
                case KW_FETCH:
                case KW_FIELDS:
                case KW_FILE:
                case KW_FILEFORMAT:
                case KW_FIRST:
                case KW_FLOAT:
                case KW_FOR:
                case KW_FORMAT:
                case KW_FORMATTED:
                case KW_FULL:
                case KW_FUNCTIONS:
                case KW_GRANT:
                case KW_GROUP:
                case KW_GROUPING:
                case KW_HOLD_DDLTIME:
                case KW_IDXPROPERTIES:
                case KW_IGNORE:
                case KW_IMPORT:
                case KW_IN:
                case KW_INDEX:
                case KW_INDEXES:
                case KW_INNER:
                case KW_INPATH:
                case KW_INPUTDRIVER:
                case KW_INPUTFORMAT:
                case KW_INSERT:
                case KW_INT:
                case KW_INTERSECT:
                case KW_INTO:
                case KW_IS:
                case KW_ITEMS:
                case KW_JAR:
                case KW_KEYS:
                case KW_KEY_TYPE:
                case KW_LATERAL:
                case KW_LEFT:
                case KW_LIKE:
                case KW_LIMIT:
                case KW_LINES:
                case KW_LOAD:
                case KW_LOCAL:
                case KW_LOCATION:
                case KW_LOCK:
                case KW_LOCKS:
                case KW_LOGICAL:
                case KW_LONG:
                case KW_MAPJOIN:
                case KW_MATERIALIZED:
                case KW_MINUS:
                case KW_MSCK:
                case KW_NONE:
                case KW_NOSCAN:
                case KW_NO_DROP:
                case KW_NULL:
                case KW_OF:
                case KW_OFFLINE:
                case KW_OPTION:
                case KW_ORDER:
                case KW_OUT:
                case KW_OUTER:
                case KW_OUTPUTDRIVER:
                case KW_OUTPUTFORMAT:
                case KW_OVERWRITE:
                case KW_OWNER:
                case KW_PARTITIONED:
                case KW_PARTITIONS:
                case KW_PERCENT:
                case KW_PLUS:
                case KW_PRETTY:
                case KW_PRINCIPALS:
                case KW_PROCEDURE:
                case KW_PROTECTION:
                case KW_PURGE:
                case KW_RANGE:
                case KW_READ:
                case KW_READONLY:
                case KW_READS:
                case KW_REBUILD:
                case KW_RECORDREADER:
                case KW_RECORDWRITER:
                case KW_REGEXP:
                case KW_RENAME:
                case KW_REPAIR:
                case KW_REPLACE:
                case KW_RESTRICT:
                case KW_REVOKE:
                case KW_REWRITE:
                case KW_RIGHT:
                case KW_RLIKE:
                case KW_ROLE:
                case KW_ROLES:
                case KW_ROLLUP:
                case KW_ROW:
                case KW_ROWS:
                case KW_SCHEMA:
                case KW_SCHEMAS:
                case KW_SEMI:
                case KW_SERDE:
                case KW_SERDEPROPERTIES:
                case KW_SET:
                case KW_SETS:
                case KW_SHARED:
                case KW_SHOW:
                case KW_SHOW_DATABASE:
                case KW_SKEWED:
                case KW_SMALLINT:
                case KW_SORT:
                case KW_SORTED:
                case KW_SSL:
                case KW_STATISTICS:
                case KW_STORED:
                case KW_STREAMTABLE:
                case KW_STRING:
                case KW_STRUCT:
                case KW_TABLE:
                case KW_TABLES:
                case KW_TBLPROPERTIES:
                case KW_TEMPORARY:
                case KW_TERMINATED:
                case KW_TIMESTAMP:
                case KW_TINYINT:
                case KW_TO:
                case KW_TOUCH:
                case KW_TRANSACTIONS:
                case KW_TRIGGER:
                case KW_TRUE:
                case KW_TRUNCATE:
                case KW_UNARCHIVE:
                case KW_UNDO:
                case KW_UNION:
                case KW_UNIONTYPE:
                case KW_UNLOCK:
                case KW_UNSET:
                case KW_UNSIGNED:
                case KW_UPDATE:
                case KW_USE:
                case KW_USER:
                case KW_USING:
                case KW_UTC:
                case KW_UTCTIMESTAMP:
                case KW_VALUES:
                case KW_VALUE_TYPE:
                case KW_VIEW:
                case KW_WHILE:
                case KW_WITH:
                    {
                    alt158=1;
                    }
                    break;
                case KW_PARTITION:
                    {
                    switch ( input.LA(3) ) {
                    case LPAREN:
                        {
                        alt158=2;
                        }
                        break;
                    case EOF:
                        {
                        alt158=1;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 158, 7, input);

                        throw nvae;

                    }

                    }
                    break;
                case EOF:
                case DOT:
                case LPAREN:
                    {
                    alt158=2;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 158, 2, input);

                    throw nvae;

                }

                }
                break;
            case Identifier:
            case KW_ADD:
            case KW_ADMIN:
            case KW_AFTER:
            case KW_ALL:
            case KW_ALTER:
            case KW_ANALYZE:
            case KW_ARCHIVE:
            case KW_ARRAY:
            case KW_AS:
            case KW_ASC:
            case KW_AUTHORIZATION:
            case KW_BEFORE:
            case KW_BETWEEN:
            case KW_BIGINT:
            case KW_BINARY:
            case KW_BOOLEAN:
            case KW_BOTH:
            case KW_BUCKET:
            case KW_BUCKETS:
            case KW_BY:
            case KW_CASCADE:
            case KW_CHANGE:
            case KW_CLUSTER:
            case KW_CLUSTERED:
            case KW_CLUSTERSTATUS:
            case KW_COLLECTION:
            case KW_COLUMNS:
            case KW_COMMENT:
            case KW_COMPACT:
            case KW_COMPACTIONS:
            case KW_COMPUTE:
            case KW_CONCATENATE:
            case KW_CONTINUE:
            case KW_CREATE:
            case KW_CUBE:
            case KW_CURSOR:
            case KW_DATA:
            case KW_DATABASES:
            case KW_DATE:
            case KW_DATETIME:
            case KW_DBPROPERTIES:
            case KW_DECIMAL:
            case KW_DEFAULT:
            case KW_DEFERRED:
            case KW_DEFINED:
            case KW_DELETE:
            case KW_DELIMITED:
            case KW_DEPENDENCY:
            case KW_DESC:
            case KW_DESCRIBE:
            case KW_DIRECTORIES:
            case KW_DIRECTORY:
            case KW_DISABLE:
            case KW_DISTRIBUTE:
            case KW_DOUBLE:
            case KW_DROP:
            case KW_ELEM_TYPE:
            case KW_ENABLE:
            case KW_ESCAPED:
            case KW_EXCLUSIVE:
            case KW_EXISTS:
            case KW_EXPLAIN:
            case KW_EXPORT:
            case KW_EXTERNAL:
            case KW_FALSE:
            case KW_FETCH:
            case KW_FIELDS:
            case KW_FILE:
            case KW_FILEFORMAT:
            case KW_FIRST:
            case KW_FLOAT:
            case KW_FOR:
            case KW_FORMAT:
            case KW_FORMATTED:
            case KW_FULL:
            case KW_FUNCTIONS:
            case KW_GRANT:
            case KW_GROUP:
            case KW_GROUPING:
            case KW_HOLD_DDLTIME:
            case KW_IDXPROPERTIES:
            case KW_IGNORE:
            case KW_IMPORT:
            case KW_IN:
            case KW_INDEX:
            case KW_INDEXES:
            case KW_INNER:
            case KW_INPATH:
            case KW_INPUTDRIVER:
            case KW_INPUTFORMAT:
            case KW_INSERT:
            case KW_INT:
            case KW_INTERSECT:
            case KW_INTO:
            case KW_IS:
            case KW_ITEMS:
            case KW_JAR:
            case KW_KEYS:
            case KW_KEY_TYPE:
            case KW_LATERAL:
            case KW_LEFT:
            case KW_LIKE:
            case KW_LIMIT:
            case KW_LINES:
            case KW_LOAD:
            case KW_LOCAL:
            case KW_LOCATION:
            case KW_LOCK:
            case KW_LOCKS:
            case KW_LOGICAL:
            case KW_LONG:
            case KW_MAPJOIN:
            case KW_MATERIALIZED:
            case KW_MINUS:
            case KW_MSCK:
            case KW_NONE:
            case KW_NOSCAN:
            case KW_NO_DROP:
            case KW_NULL:
            case KW_OF:
            case KW_OFFLINE:
            case KW_OPTION:
            case KW_ORDER:
            case KW_OUT:
            case KW_OUTER:
            case KW_OUTPUTDRIVER:
            case KW_OUTPUTFORMAT:
            case KW_OVERWRITE:
            case KW_OWNER:
            case KW_PARTITION:
            case KW_PARTITIONED:
            case KW_PARTITIONS:
            case KW_PERCENT:
            case KW_PLUS:
            case KW_PRETTY:
            case KW_PRINCIPALS:
            case KW_PROCEDURE:
            case KW_PROTECTION:
            case KW_PURGE:
            case KW_RANGE:
            case KW_READ:
            case KW_READONLY:
            case KW_READS:
            case KW_REBUILD:
            case KW_RECORDREADER:
            case KW_RECORDWRITER:
            case KW_REGEXP:
            case KW_RENAME:
            case KW_REPAIR:
            case KW_REPLACE:
            case KW_RESTRICT:
            case KW_REVOKE:
            case KW_REWRITE:
            case KW_RIGHT:
            case KW_RLIKE:
            case KW_ROLE:
            case KW_ROLES:
            case KW_ROLLUP:
            case KW_ROW:
            case KW_ROWS:
            case KW_SCHEMAS:
            case KW_SEMI:
            case KW_SERDE:
            case KW_SERDEPROPERTIES:
            case KW_SET:
            case KW_SETS:
            case KW_SHARED:
            case KW_SHOW:
            case KW_SHOW_DATABASE:
            case KW_SKEWED:
            case KW_SMALLINT:
            case KW_SORT:
            case KW_SORTED:
            case KW_SSL:
            case KW_STATISTICS:
            case KW_STORED:
            case KW_STREAMTABLE:
            case KW_STRING:
            case KW_STRUCT:
            case KW_TABLE:
            case KW_TABLES:
            case KW_TBLPROPERTIES:
            case KW_TEMPORARY:
            case KW_TERMINATED:
            case KW_TIMESTAMP:
            case KW_TINYINT:
            case KW_TO:
            case KW_TOUCH:
            case KW_TRANSACTIONS:
            case KW_TRIGGER:
            case KW_TRUE:
            case KW_TRUNCATE:
            case KW_UNARCHIVE:
            case KW_UNDO:
            case KW_UNION:
            case KW_UNIONTYPE:
            case KW_UNLOCK:
            case KW_UNSET:
            case KW_UNSIGNED:
            case KW_UPDATE:
            case KW_USE:
            case KW_USER:
            case KW_USING:
            case KW_UTC:
            case KW_UTCTIMESTAMP:
            case KW_VALUES:
            case KW_VALUE_TYPE:
            case KW_VIEW:
            case KW_WHILE:
            case KW_WITH:
                {
                alt158=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 158, 0, input);

                throw nvae;

            }
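            // Decision 158 mirrors decision 153 in privObject above, selecting between
            // "(KW_DATABASE | KW_SCHEMA) identifier" and
            // "(KW_TABLE)? tableName (LPAREN columnNameList RPAREN)? (partitionSpec)?",
            // with two extra lookahead cases: LPAREN (start of the optional column list)
            // and EOF, both visible in the KW_SCHEMA/KW_PARTITION branches of the switch.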

            switch (alt158) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1491:7: ( KW_DATABASE | KW_SCHEMA ) identifier
                    {
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1491:7: ( KW_DATABASE | KW_SCHEMA )
                    int alt154=2;
                    switch ( input.LA(1) ) {
                    case KW_DATABASE:
                        {
                        alt154=1;
                        }
                        break;
                    case KW_SCHEMA:
                        {
                        alt154=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 154, 0, input);

                        throw nvae;

                    }

                    switch (alt154) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1491:8: KW_DATABASE
                            {
                            KW_DATABASE524=(Token)match(input,KW_DATABASE,FOLLOW_KW_DATABASE_in_privObjectCols8122);  
                            stream_KW_DATABASE.add(KW_DATABASE524);


                            }
                            break;
                        case 2 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1491:20: KW_SCHEMA
                            {
                            KW_SCHEMA525=(Token)match(input,KW_SCHEMA,FOLLOW_KW_SCHEMA_in_privObjectCols8124);  
                            stream_KW_SCHEMA.add(KW_SCHEMA525);


                            }
                            break;

                    }


                    pushFollow(FOLLOW_identifier_in_privObjectCols8127);
                    identifier526=identifier();

                    state._fsp--;

                    stream_identifier.add(identifier526.getTree());

                    // AST REWRITE
                    // elements: identifier
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1491:42: -> ^( TOK_DB_TYPE identifier )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1491:45: ^( TOK_DB_TYPE identifier )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_DB_TYPE, "TOK_DB_TYPE")
                        , root_1);

                        adaptor.addChild(root_1, stream_identifier.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1492:7: ( KW_TABLE )? tableName ( LPAREN cols= columnNameList RPAREN )? ( partitionSpec )?
                    {
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1492:7: ( KW_TABLE )?
                    int alt155=2;
                    switch ( input.LA(1) ) {
                        case KW_TABLE:
                            {
                            switch ( input.LA(2) ) {
                                case Identifier:
                                case KW_ADD:
                                case KW_ADMIN:
                                case KW_AFTER:
                                case KW_ALL:
                                case KW_ALTER:
                                case KW_ANALYZE:
                                case KW_ARCHIVE:
                                case KW_ARRAY:
                                case KW_AS:
                                case KW_ASC:
                                case KW_AUTHORIZATION:
                                case KW_BEFORE:
                                case KW_BETWEEN:
                                case KW_BIGINT:
                                case KW_BINARY:
                                case KW_BOOLEAN:
                                case KW_BOTH:
                                case KW_BUCKET:
                                case KW_BUCKETS:
                                case KW_BY:
                                case KW_CASCADE:
                                case KW_CHANGE:
                                case KW_CLUSTER:
                                case KW_CLUSTERED:
                                case KW_CLUSTERSTATUS:
                                case KW_COLLECTION:
                                case KW_COLUMNS:
                                case KW_COMMENT:
                                case KW_COMPACT:
                                case KW_COMPACTIONS:
                                case KW_COMPUTE:
                                case KW_CONCATENATE:
                                case KW_CONTINUE:
                                case KW_CREATE:
                                case KW_CUBE:
                                case KW_CURSOR:
                                case KW_DATA:
                                case KW_DATABASES:
                                case KW_DATE:
                                case KW_DATETIME:
                                case KW_DBPROPERTIES:
                                case KW_DECIMAL:
                                case KW_DEFAULT:
                                case KW_DEFERRED:
                                case KW_DEFINED:
                                case KW_DELETE:
                                case KW_DELIMITED:
                                case KW_DEPENDENCY:
                                case KW_DESC:
                                case KW_DESCRIBE:
                                case KW_DIRECTORIES:
                                case KW_DIRECTORY:
                                case KW_DISABLE:
                                case KW_DISTRIBUTE:
                                case KW_DOUBLE:
                                case KW_DROP:
                                case KW_ELEM_TYPE:
                                case KW_ENABLE:
                                case KW_ESCAPED:
                                case KW_EXCLUSIVE:
                                case KW_EXISTS:
                                case KW_EXPLAIN:
                                case KW_EXPORT:
                                case KW_EXTERNAL:
                                case KW_FALSE:
                                case KW_FETCH:
                                case KW_FIELDS:
                                case KW_FILE:
                                case KW_FILEFORMAT:
                                case KW_FIRST:
                                case KW_FLOAT:
                                case KW_FOR:
                                case KW_FORMAT:
                                case KW_FORMATTED:
                                case KW_FULL:
                                case KW_FUNCTIONS:
                                case KW_GRANT:
                                case KW_GROUP:
                                case KW_GROUPING:
                                case KW_HOLD_DDLTIME:
                                case KW_IDXPROPERTIES:
                                case KW_IGNORE:
                                case KW_IMPORT:
                                case KW_IN:
                                case KW_INDEX:
                                case KW_INDEXES:
                                case KW_INNER:
                                case KW_INPATH:
                                case KW_INPUTDRIVER:
                                case KW_INPUTFORMAT:
                                case KW_INSERT:
                                case KW_INT:
                                case KW_INTERSECT:
                                case KW_INTO:
                                case KW_IS:
                                case KW_ITEMS:
                                case KW_JAR:
                                case KW_KEYS:
                                case KW_KEY_TYPE:
                                case KW_LATERAL:
                                case KW_LEFT:
                                case KW_LIKE:
                                case KW_LIMIT:
                                case KW_LINES:
                                case KW_LOAD:
                                case KW_LOCAL:
                                case KW_LOCATION:
                                case KW_LOCK:
                                case KW_LOCKS:
                                case KW_LOGICAL:
                                case KW_LONG:
                                case KW_MAPJOIN:
                                case KW_MATERIALIZED:
                                case KW_MINUS:
                                case KW_MSCK:
                                case KW_NONE:
                                case KW_NOSCAN:
                                case KW_NO_DROP:
                                case KW_NULL:
                                case KW_OF:
                                case KW_OFFLINE:
                                case KW_OPTION:
                                case KW_ORDER:
                                case KW_OUT:
                                case KW_OUTER:
                                case KW_OUTPUTDRIVER:
                                case KW_OUTPUTFORMAT:
                                case KW_OVERWRITE:
                                case KW_OWNER:
                                case KW_PARTITIONED:
                                case KW_PARTITIONS:
                                case KW_PERCENT:
                                case KW_PLUS:
                                case KW_PRETTY:
                                case KW_PRINCIPALS:
                                case KW_PROCEDURE:
                                case KW_PROTECTION:
                                case KW_PURGE:
                                case KW_RANGE:
                                case KW_READ:
                                case KW_READONLY:
                                case KW_READS:
                                case KW_REBUILD:
                                case KW_RECORDREADER:
                                case KW_RECORDWRITER:
                                case KW_REGEXP:
                                case KW_RENAME:
                                case KW_REPAIR:
                                case KW_REPLACE:
                                case KW_RESTRICT:
                                case KW_REVOKE:
                                case KW_REWRITE:
                                case KW_RIGHT:
                                case KW_RLIKE:
                                case KW_ROLE:
                                case KW_ROLES:
                                case KW_ROLLUP:
                                case KW_ROW:
                                case KW_ROWS:
                                case KW_SCHEMA:
                                case KW_SCHEMAS:
                                case KW_SEMI:
                                case KW_SERDE:
                                case KW_SERDEPROPERTIES:
                                case KW_SET:
                                case KW_SETS:
                                case KW_SHARED:
                                case KW_SHOW:
                                case KW_SHOW_DATABASE:
                                case KW_SKEWED:
                                case KW_SMALLINT:
                                case KW_SORT:
                                case KW_SORTED:
                                case KW_SSL:
                                case KW_STATISTICS:
                                case KW_STORED:
                                case KW_STREAMTABLE:
                                case KW_STRING:
                                case KW_STRUCT:
                                case KW_TABLE:
                                case KW_TABLES:
                                case KW_TBLPROPERTIES:
                                case KW_TEMPORARY:
                                case KW_TERMINATED:
                                case KW_TIMESTAMP:
                                case KW_TINYINT:
                                case KW_TO:
                                case KW_TOUCH:
                                case KW_TRANSACTIONS:
                                case KW_TRIGGER:
                                case KW_TRUE:
                                case KW_TRUNCATE:
                                case KW_UNARCHIVE:
                                case KW_UNDO:
                                case KW_UNION:
                                case KW_UNIONTYPE:
                                case KW_UNLOCK:
                                case KW_UNSET:
                                case KW_UNSIGNED:
                                case KW_UPDATE:
                                case KW_USE:
                                case KW_USER:
                                case KW_USING:
                                case KW_UTC:
                                case KW_UTCTIMESTAMP:
                                case KW_VALUES:
                                case KW_VALUE_TYPE:
                                case KW_VIEW:
                                case KW_WHILE:
                                case KW_WITH:
                                    {
                                    alt155=1;
                                    }
                                    break;
                                case KW_PARTITION:
                                    {
                                    switch ( input.LA(3) ) {
                                        case LPAREN:
                                            {
                                            alt155=1;
                                            }
                                            break;
                                        case EOF:
                                        case DOT:
                                        case KW_PARTITION:
                                            {
                                            alt155=1;
                                            }
                                            break;
                                    }

                                    }
                                    break;
                            }

                            }
                            break;
                    }

                    switch (alt155) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1492:7: KW_TABLE
                            {
                            KW_TABLE527=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_privObjectCols8143);  
                            stream_KW_TABLE.add(KW_TABLE527);


                            }
                            break;

                    }


                    pushFollow(FOLLOW_tableName_in_privObjectCols8146);
                    tableName528=tableName();

                    state._fsp--;

                    stream_tableName.add(tableName528.getTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1492:27: ( LPAREN cols= columnNameList RPAREN )?
                    int alt156=2;
                    switch ( input.LA(1) ) {
                        case LPAREN:
                            {
                            alt156=1;
                            }
                            break;
                    }

                    switch (alt156) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1492:28: LPAREN cols= columnNameList RPAREN
                            {
                            LPAREN529=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_privObjectCols8149);  
                            stream_LPAREN.add(LPAREN529);


                            pushFollow(FOLLOW_columnNameList_in_privObjectCols8153);
                            cols=columnNameList();

                            state._fsp--;

                            stream_columnNameList.add(cols.getTree());

                            RPAREN530=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_privObjectCols8155);  
                            stream_RPAREN.add(RPAREN530);


                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1492:64: ( partitionSpec )?
                    int alt157=2;
                    switch ( input.LA(1) ) {
                        case KW_PARTITION:
                            {
                            alt157=1;
                            }
                            break;
                    }

                    switch (alt157) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1492:64: partitionSpec
                            {
                            pushFollow(FOLLOW_partitionSpec_in_privObjectCols8159);
                            partitionSpec531=partitionSpec();

                            state._fsp--;

                            stream_partitionSpec.add(partitionSpec531.getTree());

                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: tableName, partitionSpec, cols
                    // token labels: 
                    // rule labels: retval, cols
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    RewriteRuleSubtreeStream stream_cols=new RewriteRuleSubtreeStream(adaptor,"rule cols",cols!=null?cols.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1492:79: -> ^( TOK_TABLE_TYPE tableName ( $cols)? ( partitionSpec )? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1492:82: ^( TOK_TABLE_TYPE tableName ( $cols)? ( partitionSpec )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_TABLE_TYPE, "TOK_TABLE_TYPE")
                        , root_1);

                        adaptor.addChild(root_1, stream_tableName.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1492:110: ( $cols)?
                        if ( stream_cols.hasNext() ) {
                            adaptor.addChild(root_1, stream_cols.nextTree());

                        }
                        stream_cols.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1492:116: ( partitionSpec )?
                        if ( stream_partitionSpec.hasNext() ) {
                            adaptor.addChild(root_1, stream_partitionSpec.nextTree());

                        }
                        stream_partitionSpec.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "privObjectCols"
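
    // Hand-added illustrative note (not part of the ANTLR-generated output):
    // the table alternative of privObjectCols shown above matches a privilege
    // object such as
    //     TABLE db1.t1 (col1, col2) PARTITION (ds='2015-05-14')
    // where the leading TABLE keyword, the column list and the partition spec
    // are each optional, and rewrites it (HiveParser.g:1492) to
    //     ^(TOK_TABLE_TYPE tableName columnNameList? partitionSpec?)
    // Note that KW_TABLE is consumed but does not appear in the resulting tree.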


    public static class privilegeList_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "privilegeList"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1495:1: privilegeList : privlegeDef ( COMMA privlegeDef )* -> ^( TOK_PRIVILEGE_LIST ( privlegeDef )+ ) ;
    public final HiveParser.privilegeList_return privilegeList() throws RecognitionException {
        HiveParser.privilegeList_return retval = new HiveParser.privilegeList_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token COMMA533=null;
        HiveParser.privlegeDef_return privlegeDef532 =null;

        HiveParser.privlegeDef_return privlegeDef534 =null;


        CommonTree COMMA533_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleSubtreeStream stream_privlegeDef=new RewriteRuleSubtreeStream(adaptor,"rule privlegeDef");
        pushMsg("grant privilege list", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1498:5: ( privlegeDef ( COMMA privlegeDef )* -> ^( TOK_PRIVILEGE_LIST ( privlegeDef )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1498:7: privlegeDef ( COMMA privlegeDef )*
            {
            pushFollow(FOLLOW_privlegeDef_in_privilegeList8202);
            privlegeDef532=privlegeDef();

            state._fsp--;

            stream_privlegeDef.add(privlegeDef532.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1498:19: ( COMMA privlegeDef )*
            loop159:
            do {
                int alt159=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt159=1;
                    }
                    break;

                }

                switch (alt159) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1498:20: COMMA privlegeDef
            	    {
            	    COMMA533=(Token)match(input,COMMA,FOLLOW_COMMA_in_privilegeList8205);  
            	    stream_COMMA.add(COMMA533);


            	    pushFollow(FOLLOW_privlegeDef_in_privilegeList8207);
            	    privlegeDef534=privlegeDef();

            	    state._fsp--;

            	    stream_privlegeDef.add(privlegeDef534.getTree());

            	    }
            	    break;

            	default :
            	    break loop159;
                }
            } while (true);


            // AST REWRITE
            // elements: privlegeDef
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1499:5: -> ^( TOK_PRIVILEGE_LIST ( privlegeDef )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1499:8: ^( TOK_PRIVILEGE_LIST ( privlegeDef )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_PRIVILEGE_LIST, "TOK_PRIVILEGE_LIST")
                , root_1);

                if ( !(stream_privlegeDef.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_privlegeDef.hasNext() ) {
                    adaptor.addChild(root_1, stream_privlegeDef.nextTree());

                }
                stream_privlegeDef.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "privilegeList"
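
    // Hand-added sketch (not part of the ANTLR-generated output): a minimal
    // example of driving the privilegeList rule directly. It assumes the
    // companion HiveLexer class generated from HiveLexer.g in this package,
    // with the standard ANTLR 3 CharStream constructor; that class is not
    // shown in this file, so treat this as an illustration rather than a
    // supported entry point (Hive itself goes through ParseDriver).
    private static CommonTree parsePrivilegeListExample() throws RecognitionException {
        // "SELECT, INSERT" is expected to rewrite to
        // ^(TOK_PRIVILEGE_LIST ^(TOK_PRIVILEGE ^(TOK_PRIV_SELECT)) ^(TOK_PRIVILEGE ^(TOK_PRIV_INSERT)))
        HiveLexer lexer = new HiveLexer(new ANTLRStringStream("SELECT, INSERT"));
        HiveParser parser = new HiveParser(new CommonTokenStream(lexer));
        return (CommonTree) parser.privilegeList().getTree();
    }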


    public static class privlegeDef_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "privlegeDef"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1502:1: privlegeDef : privilegeType ( LPAREN cols= columnNameList RPAREN )? -> ^( TOK_PRIVILEGE privilegeType ( $cols)? ) ;
    public final HiveParser.privlegeDef_return privlegeDef() throws RecognitionException {
        HiveParser.privlegeDef_return retval = new HiveParser.privlegeDef_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token LPAREN536=null;
        Token RPAREN537=null;
        HiveParser.columnNameList_return cols =null;

        HiveParser.privilegeType_return privilegeType535 =null;


        CommonTree LPAREN536_tree=null;
        CommonTree RPAREN537_tree=null;
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleSubtreeStream stream_privilegeType=new RewriteRuleSubtreeStream(adaptor,"rule privilegeType");
        RewriteRuleSubtreeStream stream_columnNameList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameList");
        pushMsg("grant privilege", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1505:5: ( privilegeType ( LPAREN cols= columnNameList RPAREN )? -> ^( TOK_PRIVILEGE privilegeType ( $cols)? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1505:7: privilegeType ( LPAREN cols= columnNameList RPAREN )?
            {
            pushFollow(FOLLOW_privilegeType_in_privlegeDef8249);
            privilegeType535=privilegeType();

            state._fsp--;

            stream_privilegeType.add(privilegeType535.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1505:21: ( LPAREN cols= columnNameList RPAREN )?
            int alt160=2;
            switch ( input.LA(1) ) {
                case LPAREN:
                    {
                    alt160=1;
                    }
                    break;
            }

            switch (alt160) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1505:22: LPAREN cols= columnNameList RPAREN
                    {
                    LPAREN536=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_privlegeDef8252);  
                    stream_LPAREN.add(LPAREN536);


                    pushFollow(FOLLOW_columnNameList_in_privlegeDef8256);
                    cols=columnNameList();

                    state._fsp--;

                    stream_columnNameList.add(cols.getTree());

                    RPAREN537=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_privlegeDef8258);  
                    stream_RPAREN.add(RPAREN537);


                    }
                    break;

            }


            // AST REWRITE
            // elements: cols, privilegeType
            // token labels: 
            // rule labels: retval, cols
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_cols=new RewriteRuleSubtreeStream(adaptor,"rule cols",cols!=null?cols.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1506:5: -> ^( TOK_PRIVILEGE privilegeType ( $cols)? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1506:8: ^( TOK_PRIVILEGE privilegeType ( $cols)? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_PRIVILEGE, "TOK_PRIVILEGE")
                , root_1);

                adaptor.addChild(root_1, stream_privilegeType.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1506:39: ( $cols)?
                if ( stream_cols.hasNext() ) {
                    adaptor.addChild(root_1, stream_cols.nextTree());

                }
                stream_cols.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "privlegeDef"
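
    // Hand-added illustrative note (not part of the ANTLR-generated output):
    // privlegeDef (the misspelling comes from the grammar itself) accepts a
    // privilege type with an optional column list, e.g.
    //     SELECT (id, name)   ->  ^(TOK_PRIVILEGE ^(TOK_PRIV_SELECT) columnNameList)
    //     INSERT              ->  ^(TOK_PRIVILEGE ^(TOK_PRIV_INSERT))
    // i.e. the $cols subtree is only attached when the "( ... )" part is present.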


    public static class privilegeType_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "privilegeType"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1509:1: privilegeType : ( KW_ALL -> ^( TOK_PRIV_ALL ) | KW_ALTER -> ^( TOK_PRIV_ALTER_METADATA ) | KW_UPDATE -> ^( TOK_PRIV_ALTER_DATA ) | KW_CREATE -> ^( TOK_PRIV_CREATE ) | KW_DROP -> ^( TOK_PRIV_DROP ) | KW_INDEX -> ^( TOK_PRIV_INDEX ) | KW_LOCK -> ^( TOK_PRIV_LOCK ) | KW_SELECT -> ^( TOK_PRIV_SELECT ) | KW_SHOW_DATABASE -> ^( TOK_PRIV_SHOW_DATABASE ) | KW_INSERT -> ^( TOK_PRIV_INSERT ) | KW_DELETE -> ^( TOK_PRIV_DELETE ) );
    public final HiveParser.privilegeType_return privilegeType() throws RecognitionException {
        HiveParser.privilegeType_return retval = new HiveParser.privilegeType_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_ALL538=null;
        Token KW_ALTER539=null;
        Token KW_UPDATE540=null;
        Token KW_CREATE541=null;
        Token KW_DROP542=null;
        Token KW_INDEX543=null;
        Token KW_LOCK544=null;
        Token KW_SELECT545=null;
        Token KW_SHOW_DATABASE546=null;
        Token KW_INSERT547=null;
        Token KW_DELETE548=null;

        CommonTree KW_ALL538_tree=null;
        CommonTree KW_ALTER539_tree=null;
        CommonTree KW_UPDATE540_tree=null;
        CommonTree KW_CREATE541_tree=null;
        CommonTree KW_DROP542_tree=null;
        CommonTree KW_INDEX543_tree=null;
        CommonTree KW_LOCK544_tree=null;
        CommonTree KW_SELECT545_tree=null;
        CommonTree KW_SHOW_DATABASE546_tree=null;
        CommonTree KW_INSERT547_tree=null;
        CommonTree KW_DELETE548_tree=null;
        RewriteRuleTokenStream stream_KW_ALTER=new RewriteRuleTokenStream(adaptor,"token KW_ALTER");
        RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
        RewriteRuleTokenStream stream_KW_ALL=new RewriteRuleTokenStream(adaptor,"token KW_ALL");
        RewriteRuleTokenStream stream_KW_SELECT=new RewriteRuleTokenStream(adaptor,"token KW_SELECT");
        RewriteRuleTokenStream stream_KW_UPDATE=new RewriteRuleTokenStream(adaptor,"token KW_UPDATE");
        RewriteRuleTokenStream stream_KW_SHOW_DATABASE=new RewriteRuleTokenStream(adaptor,"token KW_SHOW_DATABASE");
        RewriteRuleTokenStream stream_KW_INDEX=new RewriteRuleTokenStream(adaptor,"token KW_INDEX");
        RewriteRuleTokenStream stream_KW_DELETE=new RewriteRuleTokenStream(adaptor,"token KW_DELETE");
        RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
        RewriteRuleTokenStream stream_KW_LOCK=new RewriteRuleTokenStream(adaptor,"token KW_LOCK");
        RewriteRuleTokenStream stream_KW_INSERT=new RewriteRuleTokenStream(adaptor,"token KW_INSERT");

        pushMsg("privilege type", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1512:5: ( KW_ALL -> ^( TOK_PRIV_ALL ) | KW_ALTER -> ^( TOK_PRIV_ALTER_METADATA ) | KW_UPDATE -> ^( TOK_PRIV_ALTER_DATA ) | KW_CREATE -> ^( TOK_PRIV_CREATE ) | KW_DROP -> ^( TOK_PRIV_DROP ) | KW_INDEX -> ^( TOK_PRIV_INDEX ) | KW_LOCK -> ^( TOK_PRIV_LOCK ) | KW_SELECT -> ^( TOK_PRIV_SELECT ) | KW_SHOW_DATABASE -> ^( TOK_PRIV_SHOW_DATABASE ) | KW_INSERT -> ^( TOK_PRIV_INSERT ) | KW_DELETE -> ^( TOK_PRIV_DELETE ) )
            int alt161=11;
            switch ( input.LA(1) ) {
            case KW_ALL:
                {
                alt161=1;
                }
                break;
            case KW_ALTER:
                {
                alt161=2;
                }
                break;
            case KW_UPDATE:
                {
                alt161=3;
                }
                break;
            case KW_CREATE:
                {
                alt161=4;
                }
                break;
            case KW_DROP:
                {
                alt161=5;
                }
                break;
            case KW_INDEX:
                {
                alt161=6;
                }
                break;
            case KW_LOCK:
                {
                alt161=7;
                }
                break;
            case KW_SELECT:
                {
                alt161=8;
                }
                break;
            case KW_SHOW_DATABASE:
                {
                alt161=9;
                }
                break;
            case KW_INSERT:
                {
                alt161=10;
                }
                break;
            case KW_DELETE:
                {
                alt161=11;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 161, 0, input);

                throw nvae;

            }

            switch (alt161) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1512:7: KW_ALL
                    {
                    KW_ALL538=(Token)match(input,KW_ALL,FOLLOW_KW_ALL_in_privilegeType8303);  
                    stream_KW_ALL.add(KW_ALL538);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1512:14: -> ^( TOK_PRIV_ALL )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1512:17: ^( TOK_PRIV_ALL )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_PRIV_ALL, "TOK_PRIV_ALL")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1513:7: KW_ALTER
                    {
                    KW_ALTER539=(Token)match(input,KW_ALTER,FOLLOW_KW_ALTER_in_privilegeType8317);  
                    stream_KW_ALTER.add(KW_ALTER539);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1513:16: -> ^( TOK_PRIV_ALTER_METADATA )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1513:19: ^( TOK_PRIV_ALTER_METADATA )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_PRIV_ALTER_METADATA, "TOK_PRIV_ALTER_METADATA")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1514:7: KW_UPDATE
                    {
                    KW_UPDATE540=(Token)match(input,KW_UPDATE,FOLLOW_KW_UPDATE_in_privilegeType8331);  
                    stream_KW_UPDATE.add(KW_UPDATE540);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1514:17: -> ^( TOK_PRIV_ALTER_DATA )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1514:20: ^( TOK_PRIV_ALTER_DATA )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_PRIV_ALTER_DATA, "TOK_PRIV_ALTER_DATA")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 4 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1515:7: KW_CREATE
                    {
                    KW_CREATE541=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_privilegeType8345);  
                    stream_KW_CREATE.add(KW_CREATE541);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1515:17: -> ^( TOK_PRIV_CREATE )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1515:20: ^( TOK_PRIV_CREATE )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_PRIV_CREATE, "TOK_PRIV_CREATE")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 5 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1516:7: KW_DROP
                    {
                    KW_DROP542=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_privilegeType8359);  
                    stream_KW_DROP.add(KW_DROP542);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1516:15: -> ^( TOK_PRIV_DROP )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1516:18: ^( TOK_PRIV_DROP )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_PRIV_DROP, "TOK_PRIV_DROP")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 6 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1517:7: KW_INDEX
                    {
                    KW_INDEX543=(Token)match(input,KW_INDEX,FOLLOW_KW_INDEX_in_privilegeType8373);  
                    stream_KW_INDEX.add(KW_INDEX543);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1517:16: -> ^( TOK_PRIV_INDEX )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1517:19: ^( TOK_PRIV_INDEX )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_PRIV_INDEX, "TOK_PRIV_INDEX")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 7 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1518:7: KW_LOCK
                    {
                    KW_LOCK544=(Token)match(input,KW_LOCK,FOLLOW_KW_LOCK_in_privilegeType8387);  
                    stream_KW_LOCK.add(KW_LOCK544);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1518:15: -> ^( TOK_PRIV_LOCK )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1518:18: ^( TOK_PRIV_LOCK )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_PRIV_LOCK, "TOK_PRIV_LOCK")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 8 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1519:7: KW_SELECT
                    {
                    KW_SELECT545=(Token)match(input,KW_SELECT,FOLLOW_KW_SELECT_in_privilegeType8401);  
                    stream_KW_SELECT.add(KW_SELECT545);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1519:17: -> ^( TOK_PRIV_SELECT )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1519:20: ^( TOK_PRIV_SELECT )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_PRIV_SELECT, "TOK_PRIV_SELECT")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 9 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1520:7: KW_SHOW_DATABASE
                    {
                    KW_SHOW_DATABASE546=(Token)match(input,KW_SHOW_DATABASE,FOLLOW_KW_SHOW_DATABASE_in_privilegeType8415);  
                    stream_KW_SHOW_DATABASE.add(KW_SHOW_DATABASE546);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1520:24: -> ^( TOK_PRIV_SHOW_DATABASE )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1520:27: ^( TOK_PRIV_SHOW_DATABASE )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_PRIV_SHOW_DATABASE, "TOK_PRIV_SHOW_DATABASE")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 10 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1521:7: KW_INSERT
                    {
                    KW_INSERT547=(Token)match(input,KW_INSERT,FOLLOW_KW_INSERT_in_privilegeType8429);  
                    stream_KW_INSERT.add(KW_INSERT547);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1521:17: -> ^( TOK_PRIV_INSERT )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1521:20: ^( TOK_PRIV_INSERT )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_PRIV_INSERT, "TOK_PRIV_INSERT")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 11 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1522:7: KW_DELETE
                    {
                    KW_DELETE548=(Token)match(input,KW_DELETE,FOLLOW_KW_DELETE_in_privilegeType8443);  
                    stream_KW_DELETE.add(KW_DELETE548);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1522:17: -> ^( TOK_PRIV_DELETE )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1522:20: ^( TOK_PRIV_DELETE )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_PRIV_DELETE, "TOK_PRIV_DELETE")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "privilegeType"
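
    // Hand-added summary (not part of the ANTLR-generated output) of the
    // keyword -> AST token mapping implemented by privilegeType above:
    //     ALL            -> TOK_PRIV_ALL
    //     ALTER          -> TOK_PRIV_ALTER_METADATA
    //     UPDATE         -> TOK_PRIV_ALTER_DATA
    //     CREATE         -> TOK_PRIV_CREATE
    //     DROP           -> TOK_PRIV_DROP
    //     INDEX          -> TOK_PRIV_INDEX
    //     LOCK           -> TOK_PRIV_LOCK
    //     SELECT         -> TOK_PRIV_SELECT
    //     SHOW_DATABASE  -> TOK_PRIV_SHOW_DATABASE
    //     INSERT         -> TOK_PRIV_INSERT
    //     DELETE         -> TOK_PRIV_DELETE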


    public static class principalSpecification_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "principalSpecification"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1525:1: principalSpecification : principalName ( COMMA principalName )* -> ^( TOK_PRINCIPAL_NAME ( principalName )+ ) ;
    public final HiveParser.principalSpecification_return principalSpecification() throws RecognitionException {
        HiveParser.principalSpecification_return retval = new HiveParser.principalSpecification_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token COMMA550=null;
        HiveParser.principalName_return principalName549 =null;

        HiveParser.principalName_return principalName551 =null;


        CommonTree COMMA550_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleSubtreeStream stream_principalName=new RewriteRuleSubtreeStream(adaptor,"rule principalName");
         pushMsg("user/group/role name list", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1528:5: ( principalName ( COMMA principalName )* -> ^( TOK_PRINCIPAL_NAME ( principalName )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1528:7: principalName ( COMMA principalName )*
            {
            pushFollow(FOLLOW_principalName_in_principalSpecification8476);
            principalName549=principalName();

            state._fsp--;

            stream_principalName.add(principalName549.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1528:21: ( COMMA principalName )*
            loop162:
            do {
                int alt162=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt162=1;
                    }
                    break;

                }

                switch (alt162) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1528:22: COMMA principalName
            	    {
            	    COMMA550=(Token)match(input,COMMA,FOLLOW_COMMA_in_principalSpecification8479);  
            	    stream_COMMA.add(COMMA550);


            	    pushFollow(FOLLOW_principalName_in_principalSpecification8481);
            	    principalName551=principalName();

            	    state._fsp--;

            	    stream_principalName.add(principalName551.getTree());

            	    }
            	    break;

            	default :
            	    break loop162;
                }
            } while (true);


            // AST REWRITE
            // elements: principalName
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1528:44: -> ^( TOK_PRINCIPAL_NAME ( principalName )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1528:47: ^( TOK_PRINCIPAL_NAME ( principalName )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_PRINCIPAL_NAME, "TOK_PRINCIPAL_NAME")
                , root_1);

                if ( !(stream_principalName.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_principalName.hasNext() ) {
                    adaptor.addChild(root_1, stream_principalName.nextTree());

                }
                stream_principalName.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "principalSpecification"


    public static class principalName_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "principalName"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1531:1: principalName : ( KW_USER principalIdentifier -> ^( TOK_USER principalIdentifier ) | KW_GROUP principalIdentifier -> ^( TOK_GROUP principalIdentifier ) | KW_ROLE identifier -> ^( TOK_ROLE identifier ) );
    public final HiveParser.principalName_return principalName() throws RecognitionException {
        HiveParser.principalName_return retval = new HiveParser.principalName_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_USER552=null;
        Token KW_GROUP554=null;
        Token KW_ROLE556=null;
        HiveParser_IdentifiersParser.principalIdentifier_return principalIdentifier553 =null;

        HiveParser_IdentifiersParser.principalIdentifier_return principalIdentifier555 =null;

        HiveParser_IdentifiersParser.identifier_return identifier557 =null;


        CommonTree KW_USER552_tree=null;
        CommonTree KW_GROUP554_tree=null;
        CommonTree KW_ROLE556_tree=null;
        RewriteRuleTokenStream stream_KW_ROLE=new RewriteRuleTokenStream(adaptor,"token KW_ROLE");
        RewriteRuleTokenStream stream_KW_GROUP=new RewriteRuleTokenStream(adaptor,"token KW_GROUP");
        RewriteRuleTokenStream stream_KW_USER=new RewriteRuleTokenStream(adaptor,"token KW_USER");
        RewriteRuleSubtreeStream stream_principalIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule principalIdentifier");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
        pushMsg("user|group|role name", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1534:5: ( KW_USER principalIdentifier -> ^( TOK_USER principalIdentifier ) | KW_GROUP principalIdentifier -> ^( TOK_GROUP principalIdentifier ) | KW_ROLE identifier -> ^( TOK_ROLE identifier ) )
            int alt163=3;
            switch ( input.LA(1) ) {
            case KW_USER:
                {
                alt163=1;
                }
                break;
            case KW_GROUP:
                {
                alt163=2;
                }
                break;
            case KW_ROLE:
                {
                alt163=3;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 163, 0, input);

                throw nvae;

            }

            switch (alt163) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1534:7: KW_USER principalIdentifier
                    {
                    KW_USER552=(Token)match(input,KW_USER,FOLLOW_KW_USER_in_principalName8519);  
                    stream_KW_USER.add(KW_USER552);


                    pushFollow(FOLLOW_principalIdentifier_in_principalName8521);
                    principalIdentifier553=principalIdentifier();

                    state._fsp--;

                    stream_principalIdentifier.add(principalIdentifier553.getTree());

                    // AST REWRITE
                    // elements: principalIdentifier
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1534:35: -> ^( TOK_USER principalIdentifier )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1534:38: ^( TOK_USER principalIdentifier )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_USER, "TOK_USER")
                        , root_1);

                        adaptor.addChild(root_1, stream_principalIdentifier.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1535:7: KW_GROUP principalIdentifier
                    {
                    KW_GROUP554=(Token)match(input,KW_GROUP,FOLLOW_KW_GROUP_in_principalName8537);  
                    stream_KW_GROUP.add(KW_GROUP554);


                    pushFollow(FOLLOW_principalIdentifier_in_principalName8539);
                    principalIdentifier555=principalIdentifier();

                    state._fsp--;

                    stream_principalIdentifier.add(principalIdentifier555.getTree());

                    // AST REWRITE
                    // elements: principalIdentifier
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1535:36: -> ^( TOK_GROUP principalIdentifier )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1535:39: ^( TOK_GROUP principalIdentifier )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_GROUP, "TOK_GROUP")
                        , root_1);

                        adaptor.addChild(root_1, stream_principalIdentifier.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1536:7: KW_ROLE identifier
                    {
                    KW_ROLE556=(Token)match(input,KW_ROLE,FOLLOW_KW_ROLE_in_principalName8555);  
                    stream_KW_ROLE.add(KW_ROLE556);


                    pushFollow(FOLLOW_identifier_in_principalName8557);
                    identifier557=identifier();

                    state._fsp--;

                    stream_identifier.add(identifier557.getTree());

                    // AST REWRITE
                    // elements: identifier
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1536:26: -> ^( TOK_ROLE identifier )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1536:29: ^( TOK_ROLE identifier )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ROLE, "TOK_ROLE")
                        , root_1);

                        adaptor.addChild(root_1, stream_identifier.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "principalName"
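
    // Hand-added illustrative note (not part of the ANTLR-generated output):
    // principalName accepts one of three forms and wraps it accordingly:
    //     USER  <principalIdentifier>  -> ^(TOK_USER principalIdentifier)
    //     GROUP <principalIdentifier>  -> ^(TOK_GROUP principalIdentifier)
    //     ROLE  <identifier>           -> ^(TOK_ROLE identifier)
    // principalSpecification (above) collects one or more of these, separated
    // by commas, under a single TOK_PRINCIPAL_NAME node.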


    public static class withGrantOption_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "withGrantOption"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1539:1: withGrantOption : KW_WITH KW_GRANT KW_OPTION -> ^( TOK_GRANT_WITH_OPTION ) ;
    public final HiveParser.withGrantOption_return withGrantOption() throws RecognitionException {
        HiveParser.withGrantOption_return retval = new HiveParser.withGrantOption_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_WITH558=null;
        Token KW_GRANT559=null;
        Token KW_OPTION560=null;

        CommonTree KW_WITH558_tree=null;
        CommonTree KW_GRANT559_tree=null;
        CommonTree KW_OPTION560_tree=null;
        RewriteRuleTokenStream stream_KW_OPTION=new RewriteRuleTokenStream(adaptor,"token KW_OPTION");
        RewriteRuleTokenStream stream_KW_GRANT=new RewriteRuleTokenStream(adaptor,"token KW_GRANT");
        RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");

        pushMsg("with grant option", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1542:5: ( KW_WITH KW_GRANT KW_OPTION -> ^( TOK_GRANT_WITH_OPTION ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1542:7: KW_WITH KW_GRANT KW_OPTION
            {
            KW_WITH558=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_withGrantOption8592);  
            stream_KW_WITH.add(KW_WITH558);


            KW_GRANT559=(Token)match(input,KW_GRANT,FOLLOW_KW_GRANT_in_withGrantOption8594);  
            stream_KW_GRANT.add(KW_GRANT559);


            KW_OPTION560=(Token)match(input,KW_OPTION,FOLLOW_KW_OPTION_in_withGrantOption8596);  
            stream_KW_OPTION.add(KW_OPTION560);


            // AST REWRITE
            // elements: 
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1543:5: -> ^( TOK_GRANT_WITH_OPTION )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1543:8: ^( TOK_GRANT_WITH_OPTION )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_GRANT_WITH_OPTION, "TOK_GRANT_WITH_OPTION")
                , root_1);

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "withGrantOption"
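    // Illustrative example for the withGrantOption rule above: in a statement such as
    //   GRANT SELECT ON TABLE src_table TO USER hive_user WITH GRANT OPTION;
    // the trailing "WITH GRANT OPTION" clause is matched here and rewritten to a single
    // TOK_GRANT_WITH_OPTION node (table and user names are placeholders).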


    public static class grantOptionFor_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "grantOptionFor"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1546:1: grantOptionFor : KW_GRANT KW_OPTION KW_FOR -> ^( TOK_GRANT_OPTION_FOR ) ;
    public final HiveParser.grantOptionFor_return grantOptionFor() throws RecognitionException {
        HiveParser.grantOptionFor_return retval = new HiveParser.grantOptionFor_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_GRANT561=null;
        Token KW_OPTION562=null;
        Token KW_FOR563=null;

        CommonTree KW_GRANT561_tree=null;
        CommonTree KW_OPTION562_tree=null;
        CommonTree KW_FOR563_tree=null;
        RewriteRuleTokenStream stream_KW_OPTION=new RewriteRuleTokenStream(adaptor,"token KW_OPTION");
        RewriteRuleTokenStream stream_KW_GRANT=new RewriteRuleTokenStream(adaptor,"token KW_GRANT");
        RewriteRuleTokenStream stream_KW_FOR=new RewriteRuleTokenStream(adaptor,"token KW_FOR");

        pushMsg("grant option for", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1549:5: ( KW_GRANT KW_OPTION KW_FOR -> ^( TOK_GRANT_OPTION_FOR ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1549:7: KW_GRANT KW_OPTION KW_FOR
            {
            KW_GRANT561=(Token)match(input,KW_GRANT,FOLLOW_KW_GRANT_in_grantOptionFor8633);  
            stream_KW_GRANT.add(KW_GRANT561);


            KW_OPTION562=(Token)match(input,KW_OPTION,FOLLOW_KW_OPTION_in_grantOptionFor8635);  
            stream_KW_OPTION.add(KW_OPTION562);


            KW_FOR563=(Token)match(input,KW_FOR,FOLLOW_KW_FOR_in_grantOptionFor8637);  
            stream_KW_FOR.add(KW_FOR563);


            // AST REWRITE
            // elements: 
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1550:5: -> ^( TOK_GRANT_OPTION_FOR )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1550:8: ^( TOK_GRANT_OPTION_FOR )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_GRANT_OPTION_FOR, "TOK_GRANT_OPTION_FOR")
                , root_1);

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "grantOptionFor"
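    // Illustrative example for the grantOptionFor rule above: in a statement such as
    //   REVOKE GRANT OPTION FOR SELECT ON TABLE src_table FROM USER hive_user;
    // the leading "GRANT OPTION FOR" clause is matched here and rewritten to
    // TOK_GRANT_OPTION_FOR (table and user names are placeholders).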


    public static class adminOptionFor_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "adminOptionFor"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1553:1: adminOptionFor : KW_ADMIN KW_OPTION KW_FOR -> ^( TOK_ADMIN_OPTION_FOR ) ;
    public final HiveParser.adminOptionFor_return adminOptionFor() throws RecognitionException {
        HiveParser.adminOptionFor_return retval = new HiveParser.adminOptionFor_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_ADMIN564=null;
        Token KW_OPTION565=null;
        Token KW_FOR566=null;

        CommonTree KW_ADMIN564_tree=null;
        CommonTree KW_OPTION565_tree=null;
        CommonTree KW_FOR566_tree=null;
        RewriteRuleTokenStream stream_KW_OPTION=new RewriteRuleTokenStream(adaptor,"token KW_OPTION");
        RewriteRuleTokenStream stream_KW_FOR=new RewriteRuleTokenStream(adaptor,"token KW_FOR");
        RewriteRuleTokenStream stream_KW_ADMIN=new RewriteRuleTokenStream(adaptor,"token KW_ADMIN");

        pushMsg("admin option for", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1556:5: ( KW_ADMIN KW_OPTION KW_FOR -> ^( TOK_ADMIN_OPTION_FOR ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1556:7: KW_ADMIN KW_OPTION KW_FOR
            {
            KW_ADMIN564=(Token)match(input,KW_ADMIN,FOLLOW_KW_ADMIN_in_adminOptionFor8670);  
            stream_KW_ADMIN.add(KW_ADMIN564);


            KW_OPTION565=(Token)match(input,KW_OPTION,FOLLOW_KW_OPTION_in_adminOptionFor8672);  
            stream_KW_OPTION.add(KW_OPTION565);


            KW_FOR566=(Token)match(input,KW_FOR,FOLLOW_KW_FOR_in_adminOptionFor8674);  
            stream_KW_FOR.add(KW_FOR566);


            // AST REWRITE
            // elements: 
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1557:5: -> ^( TOK_ADMIN_OPTION_FOR )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1557:8: ^( TOK_ADMIN_OPTION_FOR )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ADMIN_OPTION_FOR, "TOK_ADMIN_OPTION_FOR")
                , root_1);

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "adminOptionFor"
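    // Illustrative example for the adminOptionFor rule above: in a role revocation such as
    //   REVOKE ADMIN OPTION FOR ROLE admin_role FROM USER hive_user;
    // the "ADMIN OPTION FOR" clause is matched here and rewritten to TOK_ADMIN_OPTION_FOR
    // (role and user names are placeholders).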


    public static class withAdminOption_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "withAdminOption"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1560:1: withAdminOption : KW_WITH KW_ADMIN KW_OPTION -> ^( TOK_GRANT_WITH_ADMIN_OPTION ) ;
    public final HiveParser.withAdminOption_return withAdminOption() throws RecognitionException {
        HiveParser.withAdminOption_return retval = new HiveParser.withAdminOption_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_WITH567=null;
        Token KW_ADMIN568=null;
        Token KW_OPTION569=null;

        CommonTree KW_WITH567_tree=null;
        CommonTree KW_ADMIN568_tree=null;
        CommonTree KW_OPTION569_tree=null;
        RewriteRuleTokenStream stream_KW_OPTION=new RewriteRuleTokenStream(adaptor,"token KW_OPTION");
        RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
        RewriteRuleTokenStream stream_KW_ADMIN=new RewriteRuleTokenStream(adaptor,"token KW_ADMIN");

        pushMsg("with admin option", state);
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1563:5: ( KW_WITH KW_ADMIN KW_OPTION -> ^( TOK_GRANT_WITH_ADMIN_OPTION ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1563:7: KW_WITH KW_ADMIN KW_OPTION
            {
            KW_WITH567=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_withAdminOption8707);  
            stream_KW_WITH.add(KW_WITH567);


            KW_ADMIN568=(Token)match(input,KW_ADMIN,FOLLOW_KW_ADMIN_in_withAdminOption8709);  
            stream_KW_ADMIN.add(KW_ADMIN568);


            KW_OPTION569=(Token)match(input,KW_OPTION,FOLLOW_KW_OPTION_in_withAdminOption8711);  
            stream_KW_OPTION.add(KW_OPTION569);


            // AST REWRITE
            // elements: 
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1564:5: -> ^( TOK_GRANT_WITH_ADMIN_OPTION )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1564:8: ^( TOK_GRANT_WITH_ADMIN_OPTION )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_GRANT_WITH_ADMIN_OPTION, "TOK_GRANT_WITH_ADMIN_OPTION")
                , root_1);

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

            popMsg(state);
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "withAdminOption"
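    // Illustrative example for the withAdminOption rule above: in a role grant such as
    //   GRANT ROLE admin_role TO USER hive_user WITH ADMIN OPTION;
    // the trailing "WITH ADMIN OPTION" clause is matched here and rewritten to
    // TOK_GRANT_WITH_ADMIN_OPTION (role and user names are placeholders).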


    public static class metastoreCheck_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "metastoreCheck"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1567:1: metastoreCheck : KW_MSCK (repair= KW_REPAIR )? ( KW_TABLE tableName ( partitionSpec )? ( COMMA partitionSpec )* )? -> ^( TOK_MSCK ( $repair)? ( tableName ( partitionSpec )* )? ) ;
    public final HiveParser.metastoreCheck_return metastoreCheck() throws RecognitionException {
        HiveParser.metastoreCheck_return retval = new HiveParser.metastoreCheck_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token repair=null;
        Token KW_MSCK570=null;
        Token KW_TABLE571=null;
        Token COMMA574=null;
        HiveParser_FromClauseParser.tableName_return tableName572 =null;

        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec573 =null;

        HiveParser_IdentifiersParser.partitionSpec_return partitionSpec575 =null;


        CommonTree repair_tree=null;
        CommonTree KW_MSCK570_tree=null;
        CommonTree KW_TABLE571_tree=null;
        CommonTree COMMA574_tree=null;
        RewriteRuleTokenStream stream_KW_MSCK=new RewriteRuleTokenStream(adaptor,"token KW_MSCK");
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleTokenStream stream_KW_REPAIR=new RewriteRuleTokenStream(adaptor,"token KW_REPAIR");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
        RewriteRuleSubtreeStream stream_partitionSpec=new RewriteRuleSubtreeStream(adaptor,"rule partitionSpec");
         pushMsg("metastore check statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:5: ( KW_MSCK (repair= KW_REPAIR )? ( KW_TABLE tableName ( partitionSpec )? ( COMMA partitionSpec )* )? -> ^( TOK_MSCK ( $repair)? ( tableName ( partitionSpec )* )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:7: KW_MSCK (repair= KW_REPAIR )? ( KW_TABLE tableName ( partitionSpec )? ( COMMA partitionSpec )* )?
            {
            KW_MSCK570=(Token)match(input,KW_MSCK,FOLLOW_KW_MSCK_in_metastoreCheck8748);  
            stream_KW_MSCK.add(KW_MSCK570);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:15: (repair= KW_REPAIR )?
            int alt164=2;
            switch ( input.LA(1) ) {
                case KW_REPAIR:
                    {
                    alt164=1;
                    }
                    break;
            }

            switch (alt164) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:16: repair= KW_REPAIR
                    {
                    repair=(Token)match(input,KW_REPAIR,FOLLOW_KW_REPAIR_in_metastoreCheck8753);  
                    stream_KW_REPAIR.add(repair);


                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:35: ( KW_TABLE tableName ( partitionSpec )? ( COMMA partitionSpec )* )?
            int alt167=2;
            switch ( input.LA(1) ) {
                case KW_TABLE:
                    {
                    alt167=1;
                    }
                    break;
            }

            switch (alt167) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:36: KW_TABLE tableName ( partitionSpec )? ( COMMA partitionSpec )*
                    {
                    KW_TABLE571=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_metastoreCheck8758);  
                    stream_KW_TABLE.add(KW_TABLE571);


                    pushFollow(FOLLOW_tableName_in_metastoreCheck8760);
                    tableName572=tableName();

                    state._fsp--;

                    stream_tableName.add(tableName572.getTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:55: ( partitionSpec )?
                    int alt165=2;
                    switch ( input.LA(1) ) {
                        case KW_PARTITION:
                            {
                            alt165=1;
                            }
                            break;
                    }

                    switch (alt165) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:55: partitionSpec
                            {
                            pushFollow(FOLLOW_partitionSpec_in_metastoreCheck8762);
                            partitionSpec573=partitionSpec();

                            state._fsp--;

                            stream_partitionSpec.add(partitionSpec573.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:70: ( COMMA partitionSpec )*
                    loop166:
                    do {
                        int alt166=2;
                        switch ( input.LA(1) ) {
                        case COMMA:
                            {
                            alt166=1;
                            }
                            break;

                        }

                        switch (alt166) {
                    	case 1 :
                    	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1570:71: COMMA partitionSpec
                    	    {
                    	    COMMA574=(Token)match(input,COMMA,FOLLOW_COMMA_in_metastoreCheck8766);  
                    	    stream_COMMA.add(COMMA574);


                    	    pushFollow(FOLLOW_partitionSpec_in_metastoreCheck8768);
                    	    partitionSpec575=partitionSpec();

                    	    state._fsp--;

                    	    stream_partitionSpec.add(partitionSpec575.getTree());

                    	    }
                    	    break;

                    	default :
                    	    break loop166;
                        }
                    } while (true);


                    }
                    break;

            }


            // AST REWRITE
            // elements: repair, partitionSpec, tableName
            // token labels: repair
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_repair=new RewriteRuleTokenStream(adaptor,"token repair",repair);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1571:5: -> ^( TOK_MSCK ( $repair)? ( tableName ( partitionSpec )* )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1571:8: ^( TOK_MSCK ( $repair)? ( tableName ( partitionSpec )* )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_MSCK, "TOK_MSCK")
                , root_1);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1571:20: ( $repair)?
                if ( stream_repair.hasNext() ) {
                    adaptor.addChild(root_1, stream_repair.nextNode());

                }
                stream_repair.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1571:28: ( tableName ( partitionSpec )* )?
                if ( stream_partitionSpec.hasNext()||stream_tableName.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableName.nextTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1571:39: ( partitionSpec )*
                    while ( stream_partitionSpec.hasNext() ) {
                        adaptor.addChild(root_1, stream_partitionSpec.nextTree());

                    }
                    stream_partitionSpec.reset();

                }
                stream_partitionSpec.reset();
                stream_tableName.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "metastoreCheck"
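    // Illustrative examples for the metastoreCheck rule above (table and partition values
    // are placeholders):
    //   MSCK TABLE page_view;
    //   MSCK REPAIR TABLE page_view;
    //   MSCK REPAIR TABLE page_view PARTITION (ds='2015-05-14'), PARTITION (ds='2015-05-15');
    // Each form parses to a TOK_MSCK node carrying the optional KW_REPAIR token, the table
    // name, and any partition specs as children.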


    public static class resourceList_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "resourceList"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1574:1: resourceList : resource ( COMMA resource )* -> ^( TOK_RESOURCE_LIST ( resource )+ ) ;
    public final HiveParser.resourceList_return resourceList() throws RecognitionException {
        HiveParser.resourceList_return retval = new HiveParser.resourceList_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token COMMA577=null;
        HiveParser.resource_return resource576 =null;

        HiveParser.resource_return resource578 =null;


        CommonTree COMMA577_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleSubtreeStream stream_resource=new RewriteRuleSubtreeStream(adaptor,"rule resource");
         pushMsg("resource list", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1577:3: ( resource ( COMMA resource )* -> ^( TOK_RESOURCE_LIST ( resource )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1578:3: resource ( COMMA resource )*
            {
            pushFollow(FOLLOW_resource_in_resourceList8821);
            resource576=resource();

            state._fsp--;

            stream_resource.add(resource576.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1578:12: ( COMMA resource )*
            loop168:
            do {
                int alt168=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt168=1;
                    }
                    break;

                }

                switch (alt168) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1578:13: COMMA resource
            	    {
            	    COMMA577=(Token)match(input,COMMA,FOLLOW_COMMA_in_resourceList8824);  
            	    stream_COMMA.add(COMMA577);


            	    pushFollow(FOLLOW_resource_in_resourceList8826);
            	    resource578=resource();

            	    state._fsp--;

            	    stream_resource.add(resource578.getTree());

            	    }
            	    break;

            	default :
            	    break loop168;
                }
            } while (true);


            // AST REWRITE
            // elements: resource
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1578:30: -> ^( TOK_RESOURCE_LIST ( resource )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1578:33: ^( TOK_RESOURCE_LIST ( resource )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_RESOURCE_LIST, "TOK_RESOURCE_LIST")
                , root_1);

                if ( !(stream_resource.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_resource.hasNext() ) {
                    adaptor.addChild(root_1, stream_resource.nextTree());

                }
                stream_resource.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "resourceList"
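    // Illustrative example for the resourceList rule above, as it appears in the USING
    // clause of CREATE FUNCTION (URIs are placeholders):
    //   JAR 'hdfs:///udfs/my_udf.jar', FILE 'hdfs:///udfs/lookup.properties'
    // The comma-separated resources are collected under a single TOK_RESOURCE_LIST node.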


    public static class resource_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "resource"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1581:1: resource : resType= resourceType resPath= StringLiteral -> ^( TOK_RESOURCE_URI $resType $resPath) ;
    public final HiveParser.resource_return resource() throws RecognitionException {
        HiveParser.resource_return retval = new HiveParser.resource_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token resPath=null;
        HiveParser.resourceType_return resType =null;


        CommonTree resPath_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleSubtreeStream stream_resourceType=new RewriteRuleSubtreeStream(adaptor,"rule resourceType");
         pushMsg("resource", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1584:3: (resType= resourceType resPath= StringLiteral -> ^( TOK_RESOURCE_URI $resType $resPath) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1585:3: resType= resourceType resPath= StringLiteral
            {
            pushFollow(FOLLOW_resourceType_in_resource8864);
            resType=resourceType();

            state._fsp--;

            stream_resourceType.add(resType.getTree());

            resPath=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_resource8868);  
            stream_StringLiteral.add(resPath);


            // AST REWRITE
            // elements: resType, resPath
            // token labels: resPath
            // rule labels: retval, resType
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_resPath=new RewriteRuleTokenStream(adaptor,"token resPath",resPath);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_resType=new RewriteRuleSubtreeStream(adaptor,"rule resType",resType!=null?resType.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1585:46: -> ^( TOK_RESOURCE_URI $resType $resPath)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1585:49: ^( TOK_RESOURCE_URI $resType $resPath)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_RESOURCE_URI, "TOK_RESOURCE_URI")
                , root_1);

                adaptor.addChild(root_1, stream_resType.nextTree());

                adaptor.addChild(root_1, stream_resPath.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "resource"
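    // Illustrative example for the resource rule above (the URI is a placeholder):
    //   ARCHIVE 'hdfs:///udfs/deps.tar.gz'
    // parses to ^( TOK_RESOURCE_URI ^( TOK_ARCHIVE ) 'hdfs:///udfs/deps.tar.gz' ).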


    public static class resourceType_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "resourceType"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1588:1: resourceType : ( KW_JAR -> ^( TOK_JAR ) | KW_FILE -> ^( TOK_FILE ) | KW_ARCHIVE -> ^( TOK_ARCHIVE ) );
    public final HiveParser.resourceType_return resourceType() throws RecognitionException {
        HiveParser.resourceType_return retval = new HiveParser.resourceType_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_JAR579=null;
        Token KW_FILE580=null;
        Token KW_ARCHIVE581=null;

        CommonTree KW_JAR579_tree=null;
        CommonTree KW_FILE580_tree=null;
        CommonTree KW_ARCHIVE581_tree=null;
        RewriteRuleTokenStream stream_KW_FILE=new RewriteRuleTokenStream(adaptor,"token KW_FILE");
        RewriteRuleTokenStream stream_KW_JAR=new RewriteRuleTokenStream(adaptor,"token KW_JAR");
        RewriteRuleTokenStream stream_KW_ARCHIVE=new RewriteRuleTokenStream(adaptor,"token KW_ARCHIVE");

         pushMsg("resource type", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1591:3: ( KW_JAR -> ^( TOK_JAR ) | KW_FILE -> ^( TOK_FILE ) | KW_ARCHIVE -> ^( TOK_ARCHIVE ) )
            int alt169=3;
            switch ( input.LA(1) ) {
            case KW_JAR:
                {
                alt169=1;
                }
                break;
            case KW_FILE:
                {
                alt169=2;
                }
                break;
            case KW_ARCHIVE:
                {
                alt169=3;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 169, 0, input);

                throw nvae;

            }

            switch (alt169) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1592:3: KW_JAR
                    {
                    KW_JAR579=(Token)match(input,KW_JAR,FOLLOW_KW_JAR_in_resourceType8905);  
                    stream_KW_JAR.add(KW_JAR579);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1592:10: -> ^( TOK_JAR )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1592:13: ^( TOK_JAR )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_JAR, "TOK_JAR")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1594:3: KW_FILE
                    {
                    KW_FILE580=(Token)match(input,KW_FILE,FOLLOW_KW_FILE_in_resourceType8919);  
                    stream_KW_FILE.add(KW_FILE580);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1594:11: -> ^( TOK_FILE )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1594:14: ^( TOK_FILE )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_FILE, "TOK_FILE")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1596:3: KW_ARCHIVE
                    {
                    KW_ARCHIVE581=(Token)match(input,KW_ARCHIVE,FOLLOW_KW_ARCHIVE_in_resourceType8933);  
                    stream_KW_ARCHIVE.add(KW_ARCHIVE581);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1596:14: -> ^( TOK_ARCHIVE )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1596:17: ^( TOK_ARCHIVE )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_ARCHIVE, "TOK_ARCHIVE")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "resourceType"
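    // The resourceType rule above accepts exactly one of the keywords JAR, FILE or ARCHIVE
    // and rewrites it to TOK_JAR, TOK_FILE or TOK_ARCHIVE respectively; these are the
    // resource kinds allowed in the CREATE FUNCTION ... USING clause parsed below.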


    public static class createFunctionStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "createFunctionStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1599:1: createFunctionStatement : KW_CREATE (temp= KW_TEMPORARY )? KW_FUNCTION functionIdentifier KW_AS StringLiteral ( KW_USING rList= resourceList )? -> {$temp != null}? ^( TOK_CREATEFUNCTION functionIdentifier StringLiteral ( $rList)? TOK_TEMPORARY ) -> ^( TOK_CREATEFUNCTION functionIdentifier StringLiteral ( $rList)? ) ;
    public final HiveParser.createFunctionStatement_return createFunctionStatement() throws RecognitionException {
        HiveParser.createFunctionStatement_return retval = new HiveParser.createFunctionStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token temp=null;
        Token KW_CREATE582=null;
        Token KW_FUNCTION583=null;
        Token KW_AS585=null;
        Token StringLiteral586=null;
        Token KW_USING587=null;
        HiveParser.resourceList_return rList =null;

        HiveParser_IdentifiersParser.functionIdentifier_return functionIdentifier584 =null;


        CommonTree temp_tree=null;
        CommonTree KW_CREATE582_tree=null;
        CommonTree KW_FUNCTION583_tree=null;
        CommonTree KW_AS585_tree=null;
        CommonTree StringLiteral586_tree=null;
        CommonTree KW_USING587_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
        RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
        RewriteRuleTokenStream stream_KW_FUNCTION=new RewriteRuleTokenStream(adaptor,"token KW_FUNCTION");
        RewriteRuleTokenStream stream_KW_USING=new RewriteRuleTokenStream(adaptor,"token KW_USING");
        RewriteRuleTokenStream stream_KW_TEMPORARY=new RewriteRuleTokenStream(adaptor,"token KW_TEMPORARY");
        RewriteRuleSubtreeStream stream_resourceList=new RewriteRuleSubtreeStream(adaptor,"rule resourceList");
        RewriteRuleSubtreeStream stream_functionIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule functionIdentifier");
         pushMsg("create function statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1602:5: ( KW_CREATE (temp= KW_TEMPORARY )? KW_FUNCTION functionIdentifier KW_AS StringLiteral ( KW_USING rList= resourceList )? -> {$temp != null}? ^( TOK_CREATEFUNCTION functionIdentifier StringLiteral ( $rList)? TOK_TEMPORARY ) -> ^( TOK_CREATEFUNCTION functionIdentifier StringLiteral ( $rList)? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1602:7: KW_CREATE (temp= KW_TEMPORARY )? KW_FUNCTION functionIdentifier KW_AS StringLiteral ( KW_USING rList= resourceList )?
            {
            KW_CREATE582=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_createFunctionStatement8964);  
            stream_KW_CREATE.add(KW_CREATE582);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1602:17: (temp= KW_TEMPORARY )?
            int alt170=2;
            switch ( input.LA(1) ) {
                case KW_TEMPORARY:
                    {
                    alt170=1;
                    }
                    break;
            }

            switch (alt170) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1602:18: temp= KW_TEMPORARY
                    {
                    temp=(Token)match(input,KW_TEMPORARY,FOLLOW_KW_TEMPORARY_in_createFunctionStatement8969);  
                    stream_KW_TEMPORARY.add(temp);


                    }
                    break;

            }


            KW_FUNCTION583=(Token)match(input,KW_FUNCTION,FOLLOW_KW_FUNCTION_in_createFunctionStatement8973);  
            stream_KW_FUNCTION.add(KW_FUNCTION583);


            pushFollow(FOLLOW_functionIdentifier_in_createFunctionStatement8975);
            functionIdentifier584=functionIdentifier();

            state._fsp--;

            stream_functionIdentifier.add(functionIdentifier584.getTree());

            KW_AS585=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_createFunctionStatement8977);  
            stream_KW_AS.add(KW_AS585);


            StringLiteral586=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_createFunctionStatement8979);  
            stream_StringLiteral.add(StringLiteral586);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1603:7: ( KW_USING rList= resourceList )?
            int alt171=2;
            switch ( input.LA(1) ) {
                case KW_USING:
                    {
                    alt171=1;
                    }
                    break;
            }

            switch (alt171) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1603:8: KW_USING rList= resourceList
                    {
                    KW_USING587=(Token)match(input,KW_USING,FOLLOW_KW_USING_in_createFunctionStatement8988);  
                    stream_KW_USING.add(KW_USING587);


                    pushFollow(FOLLOW_resourceList_in_createFunctionStatement8992);
                    rList=resourceList();

                    state._fsp--;

                    stream_resourceList.add(rList.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: rList, functionIdentifier, StringLiteral, functionIdentifier, rList, StringLiteral
            // token labels: 
            // rule labels: retval, rList
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_rList=new RewriteRuleSubtreeStream(adaptor,"rule rList",rList!=null?rList.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1604:5: -> {$temp != null}? ^( TOK_CREATEFUNCTION functionIdentifier StringLiteral ( $rList)? TOK_TEMPORARY )
            if (temp != null) {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1604:25: ^( TOK_CREATEFUNCTION functionIdentifier StringLiteral ( $rList)? TOK_TEMPORARY )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_CREATEFUNCTION, "TOK_CREATEFUNCTION")
                , root_1);

                adaptor.addChild(root_1, stream_functionIdentifier.nextTree());

                adaptor.addChild(root_1, 
                stream_StringLiteral.nextNode()
                );

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1604:80: ( $rList)?
                if ( stream_rList.hasNext() ) {
                    adaptor.addChild(root_1, stream_rList.nextTree());

                }
                stream_rList.reset();

                adaptor.addChild(root_1, 
                (CommonTree)adaptor.create(TOK_TEMPORARY, "TOK_TEMPORARY")
                );

                adaptor.addChild(root_0, root_1);
                }

            }

            else // 1605:5: -> ^( TOK_CREATEFUNCTION functionIdentifier StringLiteral ( $rList)? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1605:25: ^( TOK_CREATEFUNCTION functionIdentifier StringLiteral ( $rList)? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_CREATEFUNCTION, "TOK_CREATEFUNCTION")
                , root_1);

                adaptor.addChild(root_1, stream_functionIdentifier.nextTree());

                adaptor.addChild(root_1, 
                stream_StringLiteral.nextNode()
                );

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1605:80: ( $rList)?
                if ( stream_rList.hasNext() ) {
                    adaptor.addChild(root_1, stream_rList.nextTree());

                }
                stream_rList.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "createFunctionStatement"
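    // Illustrative examples for the createFunctionStatement rule above (function names,
    // class name and URI are placeholders):
    //   CREATE TEMPORARY FUNCTION my_lower AS 'com.example.udf.MyLower';
    //   CREATE FUNCTION mydb.my_lower AS 'com.example.udf.MyLower'
    //     USING JAR 'hdfs:///udfs/my_lower.jar';
    // Both forms produce a TOK_CREATEFUNCTION node; the temporary variant additionally
    // carries a TOK_TEMPORARY child, as selected by the {$temp != null}? predicate.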


    public static class dropFunctionStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "dropFunctionStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1608:1: dropFunctionStatement : KW_DROP (temp= KW_TEMPORARY )? KW_FUNCTION ( ifExists )? functionIdentifier -> {$temp != null}? ^( TOK_DROPFUNCTION functionIdentifier ( ifExists )? TOK_TEMPORARY ) -> ^( TOK_DROPFUNCTION functionIdentifier ( ifExists )? ) ;
    public final HiveParser.dropFunctionStatement_return dropFunctionStatement() throws RecognitionException {
        HiveParser.dropFunctionStatement_return retval = new HiveParser.dropFunctionStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token temp=null;
        Token KW_DROP588=null;
        Token KW_FUNCTION589=null;
        HiveParser.ifExists_return ifExists590 =null;

        HiveParser_IdentifiersParser.functionIdentifier_return functionIdentifier591 =null;


        CommonTree temp_tree=null;
        CommonTree KW_DROP588_tree=null;
        CommonTree KW_FUNCTION589_tree=null;
        RewriteRuleTokenStream stream_KW_FUNCTION=new RewriteRuleTokenStream(adaptor,"token KW_FUNCTION");
        RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
        RewriteRuleTokenStream stream_KW_TEMPORARY=new RewriteRuleTokenStream(adaptor,"token KW_TEMPORARY");
        RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");
        RewriteRuleSubtreeStream stream_functionIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule functionIdentifier");
         pushMsg("drop function statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1611:5: ( KW_DROP (temp= KW_TEMPORARY )? KW_FUNCTION ( ifExists )? functionIdentifier -> {$temp != null}? ^( TOK_DROPFUNCTION functionIdentifier ( ifExists )? TOK_TEMPORARY ) -> ^( TOK_DROPFUNCTION functionIdentifier ( ifExists )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1611:7: KW_DROP (temp= KW_TEMPORARY )? KW_FUNCTION ( ifExists )? functionIdentifier
            {
            KW_DROP588=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_dropFunctionStatement9078);  
            stream_KW_DROP.add(KW_DROP588);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1611:15: (temp= KW_TEMPORARY )?
            int alt172=2;
            switch ( input.LA(1) ) {
                case KW_TEMPORARY:
                    {
                    alt172=1;
                    }
                    break;
            }

            switch (alt172) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1611:16: temp= KW_TEMPORARY
                    {
                    temp=(Token)match(input,KW_TEMPORARY,FOLLOW_KW_TEMPORARY_in_dropFunctionStatement9083);  
                    stream_KW_TEMPORARY.add(temp);


                    }
                    break;

            }


            KW_FUNCTION589=(Token)match(input,KW_FUNCTION,FOLLOW_KW_FUNCTION_in_dropFunctionStatement9087);  
            stream_KW_FUNCTION.add(KW_FUNCTION589);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1611:48: ( ifExists )?
            int alt173=2;
            switch ( input.LA(1) ) {
                case KW_IF:
                    {
                    alt173=1;
                    }
                    break;
            }

            switch (alt173) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1611:48: ifExists
                    {
                    pushFollow(FOLLOW_ifExists_in_dropFunctionStatement9089);
                    ifExists590=ifExists();

                    state._fsp--;

                    stream_ifExists.add(ifExists590.getTree());

                    }
                    break;

            }


            pushFollow(FOLLOW_functionIdentifier_in_dropFunctionStatement9092);
            functionIdentifier591=functionIdentifier();

            state._fsp--;

            stream_functionIdentifier.add(functionIdentifier591.getTree());

            // AST REWRITE
            // elements: ifExists, ifExists, functionIdentifier, functionIdentifier
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1612:5: -> {$temp != null}? ^( TOK_DROPFUNCTION functionIdentifier ( ifExists )? TOK_TEMPORARY )
            if (temp != null) {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1612:25: ^( TOK_DROPFUNCTION functionIdentifier ( ifExists )? TOK_TEMPORARY )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DROPFUNCTION, "TOK_DROPFUNCTION")
                , root_1);

                adaptor.addChild(root_1, stream_functionIdentifier.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1612:63: ( ifExists )?
                if ( stream_ifExists.hasNext() ) {
                    adaptor.addChild(root_1, stream_ifExists.nextTree());

                }
                stream_ifExists.reset();

                adaptor.addChild(root_1, 
                (CommonTree)adaptor.create(TOK_TEMPORARY, "TOK_TEMPORARY")
                );

                adaptor.addChild(root_0, root_1);
                }

            }

            else // 1613:5: -> ^( TOK_DROPFUNCTION functionIdentifier ( ifExists )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1613:25: ^( TOK_DROPFUNCTION functionIdentifier ( ifExists )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DROPFUNCTION, "TOK_DROPFUNCTION")
                , root_1);

                adaptor.addChild(root_1, stream_functionIdentifier.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1613:63: ( ifExists )?
                if ( stream_ifExists.hasNext() ) {
                    adaptor.addChild(root_1, stream_ifExists.nextTree());

                }
                stream_ifExists.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "dropFunctionStatement"
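    // Illustrative examples for the dropFunctionStatement rule above (function names are
    // placeholders):
    //   DROP TEMPORARY FUNCTION IF EXISTS my_lower;
    //   DROP FUNCTION mydb.my_lower;
    // Both forms produce a TOK_DROPFUNCTION node; the temporary variant additionally
    // carries a TOK_TEMPORARY child, as selected by the {$temp != null}? predicate.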


    public static class createMacroStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "createMacroStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1616:1: createMacroStatement : KW_CREATE KW_TEMPORARY KW_MACRO Identifier LPAREN ( columnNameTypeList )? RPAREN expression -> ^( TOK_CREATEMACRO Identifier ( columnNameTypeList )? expression ) ;
    public final HiveParser.createMacroStatement_return createMacroStatement() throws RecognitionException {
        HiveParser.createMacroStatement_return retval = new HiveParser.createMacroStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_CREATE592=null;
        Token KW_TEMPORARY593=null;
        Token KW_MACRO594=null;
        Token Identifier595=null;
        Token LPAREN596=null;
        Token RPAREN598=null;
        HiveParser.columnNameTypeList_return columnNameTypeList597 =null;

        HiveParser_IdentifiersParser.expression_return expression599 =null;


        CommonTree KW_CREATE592_tree=null;
        CommonTree KW_TEMPORARY593_tree=null;
        CommonTree KW_MACRO594_tree=null;
        CommonTree Identifier595_tree=null;
        CommonTree LPAREN596_tree=null;
        CommonTree RPAREN598_tree=null;
        RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_KW_TEMPORARY=new RewriteRuleTokenStream(adaptor,"token KW_TEMPORARY");
        RewriteRuleTokenStream stream_Identifier=new RewriteRuleTokenStream(adaptor,"token Identifier");
        RewriteRuleTokenStream stream_KW_MACRO=new RewriteRuleTokenStream(adaptor,"token KW_MACRO");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleSubtreeStream stream_expression=new RewriteRuleSubtreeStream(adaptor,"rule expression");
        RewriteRuleSubtreeStream stream_columnNameTypeList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameTypeList");
         pushMsg("create macro statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1619:5: ( KW_CREATE KW_TEMPORARY KW_MACRO Identifier LPAREN ( columnNameTypeList )? RPAREN expression -> ^( TOK_CREATEMACRO Identifier ( columnNameTypeList )? expression ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1619:7: KW_CREATE KW_TEMPORARY KW_MACRO Identifier LPAREN ( columnNameTypeList )? RPAREN expression
            {
            KW_CREATE592=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_createMacroStatement9170);  
            stream_KW_CREATE.add(KW_CREATE592);


            KW_TEMPORARY593=(Token)match(input,KW_TEMPORARY,FOLLOW_KW_TEMPORARY_in_createMacroStatement9172);  
            stream_KW_TEMPORARY.add(KW_TEMPORARY593);


            KW_MACRO594=(Token)match(input,KW_MACRO,FOLLOW_KW_MACRO_in_createMacroStatement9174);  
            stream_KW_MACRO.add(KW_MACRO594);


            Identifier595=(Token)match(input,Identifier,FOLLOW_Identifier_in_createMacroStatement9176);  
            stream_Identifier.add(Identifier595);


            LPAREN596=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_createMacroStatement9184);  
            stream_LPAREN.add(LPAREN596);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1620:14: ( columnNameTypeList )?
            int alt174=2;
            switch ( input.LA(1) ) {
                case Identifier:
                case KW_ADD:
                case KW_ADMIN:
                case KW_AFTER:
                case KW_ALL:
                case KW_ALTER:
                case KW_ANALYZE:
                case KW_ARCHIVE:
                case KW_ARRAY:
                case KW_AS:
                case KW_ASC:
                case KW_AUTHORIZATION:
                case KW_BEFORE:
                case KW_BETWEEN:
                case KW_BIGINT:
                case KW_BINARY:
                case KW_BOOLEAN:
                case KW_BOTH:
                case KW_BUCKET:
                case KW_BUCKETS:
                case KW_BY:
                case KW_CASCADE:
                case KW_CHANGE:
                case KW_CLUSTER:
                case KW_CLUSTERED:
                case KW_CLUSTERSTATUS:
                case KW_COLLECTION:
                case KW_COLUMNS:
                case KW_COMMENT:
                case KW_COMPACT:
                case KW_COMPACTIONS:
                case KW_COMPUTE:
                case KW_CONCATENATE:
                case KW_CONTINUE:
                case KW_CREATE:
                case KW_CUBE:
                case KW_CURSOR:
                case KW_DATA:
                case KW_DATABASES:
                case KW_DATE:
                case KW_DATETIME:
                case KW_DBPROPERTIES:
                case KW_DECIMAL:
                case KW_DEFAULT:
                case KW_DEFERRED:
                case KW_DEFINED:
                case KW_DELETE:
                case KW_DELIMITED:
                case KW_DEPENDENCY:
                case KW_DESC:
                case KW_DESCRIBE:
                case KW_DIRECTORIES:
                case KW_DIRECTORY:
                case KW_DISABLE:
                case KW_DISTRIBUTE:
                case KW_DOUBLE:
                case KW_DROP:
                case KW_ELEM_TYPE:
                case KW_ENABLE:
                case KW_ESCAPED:
                case KW_EXCLUSIVE:
                case KW_EXISTS:
                case KW_EXPLAIN:
                case KW_EXPORT:
                case KW_EXTERNAL:
                case KW_FALSE:
                case KW_FETCH:
                case KW_FIELDS:
                case KW_FILE:
                case KW_FILEFORMAT:
                case KW_FIRST:
                case KW_FLOAT:
                case KW_FOR:
                case KW_FORMAT:
                case KW_FORMATTED:
                case KW_FULL:
                case KW_FUNCTIONS:
                case KW_GRANT:
                case KW_GROUP:
                case KW_GROUPING:
                case KW_HOLD_DDLTIME:
                case KW_IDXPROPERTIES:
                case KW_IGNORE:
                case KW_IMPORT:
                case KW_IN:
                case KW_INDEX:
                case KW_INDEXES:
                case KW_INNER:
                case KW_INPATH:
                case KW_INPUTDRIVER:
                case KW_INPUTFORMAT:
                case KW_INSERT:
                case KW_INT:
                case KW_INTERSECT:
                case KW_INTO:
                case KW_IS:
                case KW_ITEMS:
                case KW_JAR:
                case KW_KEYS:
                case KW_KEY_TYPE:
                case KW_LATERAL:
                case KW_LEFT:
                case KW_LIKE:
                case KW_LIMIT:
                case KW_LINES:
                case KW_LOAD:
                case KW_LOCAL:
                case KW_LOCATION:
                case KW_LOCK:
                case KW_LOCKS:
                case KW_LOGICAL:
                case KW_LONG:
                case KW_MAPJOIN:
                case KW_MATERIALIZED:
                case KW_MINUS:
                case KW_MSCK:
                case KW_NONE:
                case KW_NOSCAN:
                case KW_NO_DROP:
                case KW_NULL:
                case KW_OF:
                case KW_OFFLINE:
                case KW_OPTION:
                case KW_ORDER:
                case KW_OUT:
                case KW_OUTER:
                case KW_OUTPUTDRIVER:
                case KW_OUTPUTFORMAT:
                case KW_OVERWRITE:
                case KW_OWNER:
                case KW_PARTITION:
                case KW_PARTITIONED:
                case KW_PARTITIONS:
                case KW_PERCENT:
                case KW_PLUS:
                case KW_PRETTY:
                case KW_PRINCIPALS:
                case KW_PROCEDURE:
                case KW_PROTECTION:
                case KW_PURGE:
                case KW_RANGE:
                case KW_READ:
                case KW_READONLY:
                case KW_READS:
                case KW_REBUILD:
                case KW_RECORDREADER:
                case KW_RECORDWRITER:
                case KW_REGEXP:
                case KW_RENAME:
                case KW_REPAIR:
                case KW_REPLACE:
                case KW_RESTRICT:
                case KW_REVOKE:
                case KW_REWRITE:
                case KW_RIGHT:
                case KW_RLIKE:
                case KW_ROLE:
                case KW_ROLES:
                case KW_ROLLUP:
                case KW_ROW:
                case KW_ROWS:
                case KW_SCHEMA:
                case KW_SCHEMAS:
                case KW_SEMI:
                case KW_SERDE:
                case KW_SERDEPROPERTIES:
                case KW_SET:
                case KW_SETS:
                case KW_SHARED:
                case KW_SHOW:
                case KW_SHOW_DATABASE:
                case KW_SKEWED:
                case KW_SMALLINT:
                case KW_SORT:
                case KW_SORTED:
                case KW_SSL:
                case KW_STATISTICS:
                case KW_STORED:
                case KW_STREAMTABLE:
                case KW_STRING:
                case KW_STRUCT:
                case KW_TABLE:
                case KW_TABLES:
                case KW_TBLPROPERTIES:
                case KW_TEMPORARY:
                case KW_TERMINATED:
                case KW_TIMESTAMP:
                case KW_TINYINT:
                case KW_TO:
                case KW_TOUCH:
                case KW_TRANSACTIONS:
                case KW_TRIGGER:
                case KW_TRUE:
                case KW_TRUNCATE:
                case KW_UNARCHIVE:
                case KW_UNDO:
                case KW_UNION:
                case KW_UNIONTYPE:
                case KW_UNLOCK:
                case KW_UNSET:
                case KW_UNSIGNED:
                case KW_UPDATE:
                case KW_USE:
                case KW_USER:
                case KW_USING:
                case KW_UTC:
                case KW_UTCTIMESTAMP:
                case KW_VALUES:
                case KW_VALUE_TYPE:
                case KW_VIEW:
                case KW_WHILE:
                case KW_WITH:
                    {
                    alt174=1;
                    }
                    break;
            }

            switch (alt174) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1620:14: columnNameTypeList
                    {
                    pushFollow(FOLLOW_columnNameTypeList_in_createMacroStatement9186);
                    columnNameTypeList597=columnNameTypeList();

                    state._fsp--;

                    stream_columnNameTypeList.add(columnNameTypeList597.getTree());

                    }
                    break;

            }


            RPAREN598=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_createMacroStatement9189);  
            stream_RPAREN.add(RPAREN598);


            pushFollow(FOLLOW_expression_in_createMacroStatement9191);
            expression599=expression();

            state._fsp--;

            stream_expression.add(expression599.getTree());

            // AST REWRITE
            // elements: columnNameTypeList, expression, Identifier
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1621:5: -> ^( TOK_CREATEMACRO Identifier ( columnNameTypeList )? expression )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1621:8: ^( TOK_CREATEMACRO Identifier ( columnNameTypeList )? expression )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_CREATEMACRO, "TOK_CREATEMACRO")
                , root_1);

                adaptor.addChild(root_1, 
                stream_Identifier.nextNode()
                );

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1621:37: ( columnNameTypeList )?
                if ( stream_columnNameTypeList.hasNext() ) {
                    adaptor.addChild(root_1, stream_columnNameTypeList.nextTree());

                }
                stream_columnNameTypeList.reset();

                adaptor.addChild(root_1, stream_expression.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "createMacroStatement"


    public static class dropMacroStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "dropMacroStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1624:1: dropMacroStatement : KW_DROP KW_TEMPORARY KW_MACRO ( ifExists )? Identifier -> ^( TOK_DROPMACRO Identifier ( ifExists )? ) ;
    public final HiveParser.dropMacroStatement_return dropMacroStatement() throws RecognitionException {
        HiveParser.dropMacroStatement_return retval = new HiveParser.dropMacroStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_DROP600=null;
        Token KW_TEMPORARY601=null;
        Token KW_MACRO602=null;
        Token Identifier604=null;
        HiveParser.ifExists_return ifExists603 =null;


        CommonTree KW_DROP600_tree=null;
        CommonTree KW_TEMPORARY601_tree=null;
        CommonTree KW_MACRO602_tree=null;
        CommonTree Identifier604_tree=null;
        RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
        RewriteRuleTokenStream stream_KW_TEMPORARY=new RewriteRuleTokenStream(adaptor,"token KW_TEMPORARY");
        RewriteRuleTokenStream stream_Identifier=new RewriteRuleTokenStream(adaptor,"token Identifier");
        RewriteRuleTokenStream stream_KW_MACRO=new RewriteRuleTokenStream(adaptor,"token KW_MACRO");
        RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");
         pushMsg("drop macro statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1627:5: ( KW_DROP KW_TEMPORARY KW_MACRO ( ifExists )? Identifier -> ^( TOK_DROPMACRO Identifier ( ifExists )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1627:7: KW_DROP KW_TEMPORARY KW_MACRO ( ifExists )? Identifier
            {
            KW_DROP600=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_dropMacroStatement9235);  
            stream_KW_DROP.add(KW_DROP600);


            KW_TEMPORARY601=(Token)match(input,KW_TEMPORARY,FOLLOW_KW_TEMPORARY_in_dropMacroStatement9237);  
            stream_KW_TEMPORARY.add(KW_TEMPORARY601);


            KW_MACRO602=(Token)match(input,KW_MACRO,FOLLOW_KW_MACRO_in_dropMacroStatement9239);  
            stream_KW_MACRO.add(KW_MACRO602);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1627:37: ( ifExists )?
            int alt175=2;
            switch ( input.LA(1) ) {
                case KW_IF:
                    {
                    alt175=1;
                    }
                    break;
            }

            switch (alt175) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1627:37: ifExists
                    {
                    pushFollow(FOLLOW_ifExists_in_dropMacroStatement9241);
                    ifExists603=ifExists();

                    state._fsp--;

                    stream_ifExists.add(ifExists603.getTree());

                    }
                    break;

            }


            Identifier604=(Token)match(input,Identifier,FOLLOW_Identifier_in_dropMacroStatement9244);  
            stream_Identifier.add(Identifier604);


            // AST REWRITE
            // elements: ifExists, Identifier
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1628:5: -> ^( TOK_DROPMACRO Identifier ( ifExists )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1628:8: ^( TOK_DROPMACRO Identifier ( ifExists )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DROPMACRO, "TOK_DROPMACRO")
                , root_1);

                adaptor.addChild(root_1, 
                stream_Identifier.nextNode()
                );

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1628:35: ( ifExists )?
                if ( stream_ifExists.hasNext() ) {
                    adaptor.addChild(root_1, stream_ifExists.nextTree());

                }
                stream_ifExists.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "dropMacroStatement"


    public static class createViewStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "createViewStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1631:1: createViewStatement : KW_CREATE ( orReplace )? KW_VIEW ( ifNotExists )? name= tableName ( LPAREN columnNameCommentList RPAREN )? ( tableComment )? ( viewPartition )? ( tablePropertiesPrefixed )? KW_AS selectStatementWithCTE -> ^( TOK_CREATEVIEW $name ( orReplace )? ( ifNotExists )? ( columnNameCommentList )? ( tableComment )? ( viewPartition )? ( tablePropertiesPrefixed )? selectStatementWithCTE ) ;
    public final HiveParser.createViewStatement_return createViewStatement() throws RecognitionException {
        HiveParser.createViewStatement_return retval = new HiveParser.createViewStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_CREATE605=null;
        Token KW_VIEW607=null;
        Token LPAREN609=null;
        Token RPAREN611=null;
        Token KW_AS615=null;
        HiveParser_FromClauseParser.tableName_return name =null;

        HiveParser.orReplace_return orReplace606 =null;

        HiveParser.ifNotExists_return ifNotExists608 =null;

        HiveParser.columnNameCommentList_return columnNameCommentList610 =null;

        HiveParser.tableComment_return tableComment612 =null;

        HiveParser.viewPartition_return viewPartition613 =null;

        HiveParser.tablePropertiesPrefixed_return tablePropertiesPrefixed614 =null;

        HiveParser.selectStatementWithCTE_return selectStatementWithCTE616 =null;


        CommonTree KW_CREATE605_tree=null;
        CommonTree KW_VIEW607_tree=null;
        CommonTree LPAREN609_tree=null;
        CommonTree RPAREN611_tree=null;
        CommonTree KW_AS615_tree=null;
        RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
        RewriteRuleTokenStream stream_KW_CREATE=new RewriteRuleTokenStream(adaptor,"token KW_CREATE");
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_KW_VIEW=new RewriteRuleTokenStream(adaptor,"token KW_VIEW");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleSubtreeStream stream_selectStatementWithCTE=new RewriteRuleSubtreeStream(adaptor,"rule selectStatementWithCTE");
        RewriteRuleSubtreeStream stream_columnNameCommentList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameCommentList");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
        RewriteRuleSubtreeStream stream_orReplace=new RewriteRuleSubtreeStream(adaptor,"rule orReplace");
        RewriteRuleSubtreeStream stream_tableComment=new RewriteRuleSubtreeStream(adaptor,"rule tableComment");
        RewriteRuleSubtreeStream stream_tablePropertiesPrefixed=new RewriteRuleSubtreeStream(adaptor,"rule tablePropertiesPrefixed");
        RewriteRuleSubtreeStream stream_viewPartition=new RewriteRuleSubtreeStream(adaptor,"rule viewPartition");
        RewriteRuleSubtreeStream stream_ifNotExists=new RewriteRuleSubtreeStream(adaptor,"rule ifNotExists");

            pushMsg("create view statement", state);

        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1636:5: ( KW_CREATE ( orReplace )? KW_VIEW ( ifNotExists )? name= tableName ( LPAREN columnNameCommentList RPAREN )? ( tableComment )? ( viewPartition )? ( tablePropertiesPrefixed )? KW_AS selectStatementWithCTE -> ^( TOK_CREATEVIEW $name ( orReplace )? ( ifNotExists )? ( columnNameCommentList )? ( tableComment )? ( viewPartition )? ( tablePropertiesPrefixed )? selectStatementWithCTE ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1636:7: KW_CREATE ( orReplace )? KW_VIEW ( ifNotExists )? name= tableName ( LPAREN columnNameCommentList RPAREN )? ( tableComment )? ( viewPartition )? ( tablePropertiesPrefixed )? KW_AS selectStatementWithCTE
            {
            KW_CREATE605=(Token)match(input,KW_CREATE,FOLLOW_KW_CREATE_in_createViewStatement9286);  
            stream_KW_CREATE.add(KW_CREATE605);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1636:17: ( orReplace )?
            int alt176=2;
            switch ( input.LA(1) ) {
                case KW_OR:
                    {
                    alt176=1;
                    }
                    break;
            }

            switch (alt176) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1636:18: orReplace
                    {
                    pushFollow(FOLLOW_orReplace_in_createViewStatement9289);
                    orReplace606=orReplace();

                    state._fsp--;

                    stream_orReplace.add(orReplace606.getTree());

                    }
                    break;

            }


            KW_VIEW607=(Token)match(input,KW_VIEW,FOLLOW_KW_VIEW_in_createViewStatement9293);  
            stream_KW_VIEW.add(KW_VIEW607);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1636:38: ( ifNotExists )?
            int alt177=2;
            switch ( input.LA(1) ) {
                case KW_IF:
                    {
                    alt177=1;
                    }
                    break;
            }

            switch (alt177) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1636:39: ifNotExists
                    {
                    pushFollow(FOLLOW_ifNotExists_in_createViewStatement9296);
                    ifNotExists608=ifNotExists();

                    state._fsp--;

                    stream_ifNotExists.add(ifNotExists608.getTree());

                    }
                    break;

            }


            pushFollow(FOLLOW_tableName_in_createViewStatement9302);
            name=tableName();

            state._fsp--;

            stream_tableName.add(name.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1637:9: ( LPAREN columnNameCommentList RPAREN )?
            int alt178=2;
            switch ( input.LA(1) ) {
                case LPAREN:
                    {
                    alt178=1;
                    }
                    break;
            }

            switch (alt178) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1637:10: LPAREN columnNameCommentList RPAREN
                    {
                    LPAREN609=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_createViewStatement9313);  
                    stream_LPAREN.add(LPAREN609);


                    pushFollow(FOLLOW_columnNameCommentList_in_createViewStatement9315);
                    columnNameCommentList610=columnNameCommentList();

                    state._fsp--;

                    stream_columnNameCommentList.add(columnNameCommentList610.getTree());

                    RPAREN611=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_createViewStatement9317);  
                    stream_RPAREN.add(RPAREN611);


                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1637:48: ( tableComment )?
            int alt179=2;
            switch ( input.LA(1) ) {
                case KW_COMMENT:
                    {
                    alt179=1;
                    }
                    break;
            }

            switch (alt179) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1637:48: tableComment
                    {
                    pushFollow(FOLLOW_tableComment_in_createViewStatement9321);
                    tableComment612=tableComment();

                    state._fsp--;

                    stream_tableComment.add(tableComment612.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1637:62: ( viewPartition )?
            int alt180=2;
            switch ( input.LA(1) ) {
                case KW_PARTITIONED:
                    {
                    alt180=1;
                    }
                    break;
            }

            switch (alt180) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1637:62: viewPartition
                    {
                    pushFollow(FOLLOW_viewPartition_in_createViewStatement9324);
                    viewPartition613=viewPartition();

                    state._fsp--;

                    stream_viewPartition.add(viewPartition613.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1638:9: ( tablePropertiesPrefixed )?
            int alt181=2;
            switch ( input.LA(1) ) {
                case KW_TBLPROPERTIES:
                    {
                    alt181=1;
                    }
                    break;
            }

            switch (alt181) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1638:9: tablePropertiesPrefixed
                    {
                    pushFollow(FOLLOW_tablePropertiesPrefixed_in_createViewStatement9335);
                    tablePropertiesPrefixed614=tablePropertiesPrefixed();

                    state._fsp--;

                    stream_tablePropertiesPrefixed.add(tablePropertiesPrefixed614.getTree());

                    }
                    break;

            }


            KW_AS615=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_createViewStatement9346);  
            stream_KW_AS.add(KW_AS615);


            pushFollow(FOLLOW_selectStatementWithCTE_in_createViewStatement9356);
            selectStatementWithCTE616=selectStatementWithCTE();

            state._fsp--;

            stream_selectStatementWithCTE.add(selectStatementWithCTE616.getTree());

            // AST REWRITE
            // elements: viewPartition, columnNameCommentList, name, orReplace, selectStatementWithCTE, tablePropertiesPrefixed, ifNotExists, tableComment
            // token labels: 
            // rule labels: retval, name
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_name=new RewriteRuleSubtreeStream(adaptor,"rule name",name!=null?name.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1641:5: -> ^( TOK_CREATEVIEW $name ( orReplace )? ( ifNotExists )? ( columnNameCommentList )? ( tableComment )? ( viewPartition )? ( tablePropertiesPrefixed )? selectStatementWithCTE )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1641:8: ^( TOK_CREATEVIEW $name ( orReplace )? ( ifNotExists )? ( columnNameCommentList )? ( tableComment )? ( viewPartition )? ( tablePropertiesPrefixed )? selectStatementWithCTE )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_CREATEVIEW, "TOK_CREATEVIEW")
                , root_1);

                adaptor.addChild(root_1, stream_name.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1641:31: ( orReplace )?
                if ( stream_orReplace.hasNext() ) {
                    adaptor.addChild(root_1, stream_orReplace.nextTree());

                }
                stream_orReplace.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1642:10: ( ifNotExists )?
                if ( stream_ifNotExists.hasNext() ) {
                    adaptor.addChild(root_1, stream_ifNotExists.nextTree());

                }
                stream_ifNotExists.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1643:10: ( columnNameCommentList )?
                if ( stream_columnNameCommentList.hasNext() ) {
                    adaptor.addChild(root_1, stream_columnNameCommentList.nextTree());

                }
                stream_columnNameCommentList.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1644:10: ( tableComment )?
                if ( stream_tableComment.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableComment.nextTree());

                }
                stream_tableComment.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1645:10: ( viewPartition )?
                if ( stream_viewPartition.hasNext() ) {
                    adaptor.addChild(root_1, stream_viewPartition.nextTree());

                }
                stream_viewPartition.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1646:10: ( tablePropertiesPrefixed )?
                if ( stream_tablePropertiesPrefixed.hasNext() ) {
                    adaptor.addChild(root_1, stream_tablePropertiesPrefixed.nextTree());

                }
                stream_tablePropertiesPrefixed.reset();

                adaptor.addChild(root_1, stream_selectStatementWithCTE.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "createViewStatement"


    public static class viewPartition_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "viewPartition"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1651:1: viewPartition : KW_PARTITIONED KW_ON LPAREN columnNameList RPAREN -> ^( TOK_VIEWPARTCOLS columnNameList ) ;
    public final HiveParser.viewPartition_return viewPartition() throws RecognitionException {
        HiveParser.viewPartition_return retval = new HiveParser.viewPartition_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_PARTITIONED617=null;
        Token KW_ON618=null;
        Token LPAREN619=null;
        Token RPAREN621=null;
        HiveParser.columnNameList_return columnNameList620 =null;


        CommonTree KW_PARTITIONED617_tree=null;
        CommonTree KW_ON618_tree=null;
        CommonTree LPAREN619_tree=null;
        CommonTree RPAREN621_tree=null;
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_KW_PARTITIONED=new RewriteRuleTokenStream(adaptor,"token KW_PARTITIONED");
        RewriteRuleTokenStream stream_KW_ON=new RewriteRuleTokenStream(adaptor,"token KW_ON");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleSubtreeStream stream_columnNameList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameList");
         pushMsg("view partition specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1654:5: ( KW_PARTITIONED KW_ON LPAREN columnNameList RPAREN -> ^( TOK_VIEWPARTCOLS columnNameList ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1654:7: KW_PARTITIONED KW_ON LPAREN columnNameList RPAREN
            {
            KW_PARTITIONED617=(Token)match(input,KW_PARTITIONED,FOLLOW_KW_PARTITIONED_in_viewPartition9479);  
            stream_KW_PARTITIONED.add(KW_PARTITIONED617);


            KW_ON618=(Token)match(input,KW_ON,FOLLOW_KW_ON_in_viewPartition9481);  
            stream_KW_ON.add(KW_ON618);


            LPAREN619=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_viewPartition9483);  
            stream_LPAREN.add(LPAREN619);


            pushFollow(FOLLOW_columnNameList_in_viewPartition9485);
            columnNameList620=columnNameList();

            state._fsp--;

            stream_columnNameList.add(columnNameList620.getTree());

            RPAREN621=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_viewPartition9487);  
            stream_RPAREN.add(RPAREN621);


            // AST REWRITE
            // elements: columnNameList
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1655:5: -> ^( TOK_VIEWPARTCOLS columnNameList )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1655:8: ^( TOK_VIEWPARTCOLS columnNameList )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_VIEWPARTCOLS, "TOK_VIEWPARTCOLS")
                , root_1);

                adaptor.addChild(root_1, stream_columnNameList.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "viewPartition"


    public static class dropViewStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "dropViewStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1658:1: dropViewStatement : KW_DROP KW_VIEW ( ifExists )? viewName -> ^( TOK_DROPVIEW viewName ( ifExists )? ) ;
    public final HiveParser.dropViewStatement_return dropViewStatement() throws RecognitionException {
        HiveParser.dropViewStatement_return retval = new HiveParser.dropViewStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_DROP622=null;
        Token KW_VIEW623=null;
        HiveParser.ifExists_return ifExists624 =null;

        HiveParser_FromClauseParser.viewName_return viewName625 =null;


        CommonTree KW_DROP622_tree=null;
        CommonTree KW_VIEW623_tree=null;
        RewriteRuleTokenStream stream_KW_DROP=new RewriteRuleTokenStream(adaptor,"token KW_DROP");
        RewriteRuleTokenStream stream_KW_VIEW=new RewriteRuleTokenStream(adaptor,"token KW_VIEW");
        RewriteRuleSubtreeStream stream_viewName=new RewriteRuleSubtreeStream(adaptor,"rule viewName");
        RewriteRuleSubtreeStream stream_ifExists=new RewriteRuleSubtreeStream(adaptor,"rule ifExists");
         pushMsg("drop view statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1661:5: ( KW_DROP KW_VIEW ( ifExists )? viewName -> ^( TOK_DROPVIEW viewName ( ifExists )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1661:7: KW_DROP KW_VIEW ( ifExists )? viewName
            {
            KW_DROP622=(Token)match(input,KW_DROP,FOLLOW_KW_DROP_in_dropViewStatement9526);  
            stream_KW_DROP.add(KW_DROP622);


            KW_VIEW623=(Token)match(input,KW_VIEW,FOLLOW_KW_VIEW_in_dropViewStatement9528);  
            stream_KW_VIEW.add(KW_VIEW623);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1661:23: ( ifExists )?
            int alt182=2;
            switch ( input.LA(1) ) {
                case KW_IF:
                    {
                    alt182=1;
                    }
                    break;
            }

            switch (alt182) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1661:23: ifExists
                    {
                    pushFollow(FOLLOW_ifExists_in_dropViewStatement9530);
                    ifExists624=ifExists();

                    state._fsp--;

                    stream_ifExists.add(ifExists624.getTree());

                    }
                    break;

            }


            pushFollow(FOLLOW_viewName_in_dropViewStatement9533);
            viewName625=viewName();

            state._fsp--;

            stream_viewName.add(viewName625.getTree());

            // AST REWRITE
            // elements: viewName, ifExists
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1661:42: -> ^( TOK_DROPVIEW viewName ( ifExists )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1661:45: ^( TOK_DROPVIEW viewName ( ifExists )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DROPVIEW, "TOK_DROPVIEW")
                , root_1);

                adaptor.addChild(root_1, stream_viewName.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1661:69: ( ifExists )?
                if ( stream_ifExists.hasNext() ) {
                    adaptor.addChild(root_1, stream_ifExists.nextTree());

                }
                stream_ifExists.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "dropViewStatement"


    public static class showFunctionIdentifier_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "showFunctionIdentifier"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1664:1: showFunctionIdentifier : ( functionIdentifier | StringLiteral );
    public final HiveParser.showFunctionIdentifier_return showFunctionIdentifier() throws RecognitionException {
        HiveParser.showFunctionIdentifier_return retval = new HiveParser.showFunctionIdentifier_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token StringLiteral627=null;
        HiveParser_IdentifiersParser.functionIdentifier_return functionIdentifier626 =null;


        CommonTree StringLiteral627_tree=null;

         pushMsg("identifier for show function statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1667:5: ( functionIdentifier | StringLiteral )
            int alt183=2;
            switch ( input.LA(1) ) {
            case Identifier:
            case KW_ADD:
            case KW_ADMIN:
            case KW_AFTER:
            case KW_ALL:
            case KW_ALTER:
            case KW_ANALYZE:
            case KW_ARCHIVE:
            case KW_ARRAY:
            case KW_AS:
            case KW_ASC:
            case KW_AUTHORIZATION:
            case KW_BEFORE:
            case KW_BETWEEN:
            case KW_BIGINT:
            case KW_BINARY:
            case KW_BOOLEAN:
            case KW_BOTH:
            case KW_BUCKET:
            case KW_BUCKETS:
            case KW_BY:
            case KW_CASCADE:
            case KW_CHANGE:
            case KW_CLUSTER:
            case KW_CLUSTERED:
            case KW_CLUSTERSTATUS:
            case KW_COLLECTION:
            case KW_COLUMNS:
            case KW_COMMENT:
            case KW_COMPACT:
            case KW_COMPACTIONS:
            case KW_COMPUTE:
            case KW_CONCATENATE:
            case KW_CONTINUE:
            case KW_CREATE:
            case KW_CUBE:
            case KW_CURSOR:
            case KW_DATA:
            case KW_DATABASES:
            case KW_DATE:
            case KW_DATETIME:
            case KW_DBPROPERTIES:
            case KW_DECIMAL:
            case KW_DEFAULT:
            case KW_DEFERRED:
            case KW_DEFINED:
            case KW_DELETE:
            case KW_DELIMITED:
            case KW_DEPENDENCY:
            case KW_DESC:
            case KW_DESCRIBE:
            case KW_DIRECTORIES:
            case KW_DIRECTORY:
            case KW_DISABLE:
            case KW_DISTRIBUTE:
            case KW_DOUBLE:
            case KW_DROP:
            case KW_ELEM_TYPE:
            case KW_ENABLE:
            case KW_ESCAPED:
            case KW_EXCLUSIVE:
            case KW_EXISTS:
            case KW_EXPLAIN:
            case KW_EXPORT:
            case KW_EXTERNAL:
            case KW_FALSE:
            case KW_FETCH:
            case KW_FIELDS:
            case KW_FILE:
            case KW_FILEFORMAT:
            case KW_FIRST:
            case KW_FLOAT:
            case KW_FOR:
            case KW_FORMAT:
            case KW_FORMATTED:
            case KW_FULL:
            case KW_FUNCTIONS:
            case KW_GRANT:
            case KW_GROUP:
            case KW_GROUPING:
            case KW_HOLD_DDLTIME:
            case KW_IDXPROPERTIES:
            case KW_IGNORE:
            case KW_IMPORT:
            case KW_IN:
            case KW_INDEX:
            case KW_INDEXES:
            case KW_INNER:
            case KW_INPATH:
            case KW_INPUTDRIVER:
            case KW_INPUTFORMAT:
            case KW_INSERT:
            case KW_INT:
            case KW_INTERSECT:
            case KW_INTO:
            case KW_IS:
            case KW_ITEMS:
            case KW_JAR:
            case KW_KEYS:
            case KW_KEY_TYPE:
            case KW_LATERAL:
            case KW_LEFT:
            case KW_LIKE:
            case KW_LIMIT:
            case KW_LINES:
            case KW_LOAD:
            case KW_LOCAL:
            case KW_LOCATION:
            case KW_LOCK:
            case KW_LOCKS:
            case KW_LOGICAL:
            case KW_LONG:
            case KW_MAPJOIN:
            case KW_MATERIALIZED:
            case KW_MINUS:
            case KW_MSCK:
            case KW_NONE:
            case KW_NOSCAN:
            case KW_NO_DROP:
            case KW_NULL:
            case KW_OF:
            case KW_OFFLINE:
            case KW_OPTION:
            case KW_ORDER:
            case KW_OUT:
            case KW_OUTER:
            case KW_OUTPUTDRIVER:
            case KW_OUTPUTFORMAT:
            case KW_OVERWRITE:
            case KW_OWNER:
            case KW_PARTITION:
            case KW_PARTITIONED:
            case KW_PARTITIONS:
            case KW_PERCENT:
            case KW_PLUS:
            case KW_PRETTY:
            case KW_PRINCIPALS:
            case KW_PROCEDURE:
            case KW_PROTECTION:
            case KW_PURGE:
            case KW_RANGE:
            case KW_READ:
            case KW_READONLY:
            case KW_READS:
            case KW_REBUILD:
            case KW_RECORDREADER:
            case KW_RECORDWRITER:
            case KW_REGEXP:
            case KW_RENAME:
            case KW_REPAIR:
            case KW_REPLACE:
            case KW_RESTRICT:
            case KW_REVOKE:
            case KW_REWRITE:
            case KW_RIGHT:
            case KW_RLIKE:
            case KW_ROLE:
            case KW_ROLES:
            case KW_ROLLUP:
            case KW_ROW:
            case KW_ROWS:
            case KW_SCHEMA:
            case KW_SCHEMAS:
            case KW_SEMI:
            case KW_SERDE:
            case KW_SERDEPROPERTIES:
            case KW_SET:
            case KW_SETS:
            case KW_SHARED:
            case KW_SHOW:
            case KW_SHOW_DATABASE:
            case KW_SKEWED:
            case KW_SMALLINT:
            case KW_SORT:
            case KW_SORTED:
            case KW_SSL:
            case KW_STATISTICS:
            case KW_STORED:
            case KW_STREAMTABLE:
            case KW_STRING:
            case KW_STRUCT:
            case KW_TABLE:
            case KW_TABLES:
            case KW_TBLPROPERTIES:
            case KW_TEMPORARY:
            case KW_TERMINATED:
            case KW_TIMESTAMP:
            case KW_TINYINT:
            case KW_TO:
            case KW_TOUCH:
            case KW_TRANSACTIONS:
            case KW_TRIGGER:
            case KW_TRUE:
            case KW_TRUNCATE:
            case KW_UNARCHIVE:
            case KW_UNDO:
            case KW_UNION:
            case KW_UNIONTYPE:
            case KW_UNLOCK:
            case KW_UNSET:
            case KW_UNSIGNED:
            case KW_UPDATE:
            case KW_USE:
            case KW_USER:
            case KW_USING:
            case KW_UTC:
            case KW_UTCTIMESTAMP:
            case KW_VALUES:
            case KW_VALUE_TYPE:
            case KW_VIEW:
            case KW_WHILE:
            case KW_WITH:
                {
                alt183=1;
                }
                break;
            case StringLiteral:
                {
                alt183=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 183, 0, input);

                throw nvae;

            }

            switch (alt183) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1667:7: functionIdentifier
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_functionIdentifier_in_showFunctionIdentifier9571);
                    functionIdentifier626=functionIdentifier();

                    state._fsp--;

                    adaptor.addChild(root_0, functionIdentifier626.getTree());

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1668:7: StringLiteral
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    StringLiteral627=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_showFunctionIdentifier9579); 
                    StringLiteral627_tree = 
                    (CommonTree)adaptor.create(StringLiteral627)
                    ;
                    adaptor.addChild(root_0, StringLiteral627_tree);


                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "showFunctionIdentifier"


    public static class showStmtIdentifier_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "showStmtIdentifier"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1671:1: showStmtIdentifier : ( identifier | StringLiteral );
    public final HiveParser.showStmtIdentifier_return showStmtIdentifier() throws RecognitionException {
        HiveParser.showStmtIdentifier_return retval = new HiveParser.showStmtIdentifier_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token StringLiteral629=null;
        HiveParser_IdentifiersParser.identifier_return identifier628 =null;


        CommonTree StringLiteral629_tree=null;

         pushMsg("identifier for show statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1674:5: ( identifier | StringLiteral )
            int alt184=2;
            switch ( input.LA(1) ) {
            case Identifier:
            case KW_ADD:
            case KW_ADMIN:
            case KW_AFTER:
            case KW_ALL:
            case KW_ALTER:
            case KW_ANALYZE:
            case KW_ARCHIVE:
            case KW_ARRAY:
            case KW_AS:
            case KW_ASC:
            case KW_AUTHORIZATION:
            case KW_BEFORE:
            case KW_BETWEEN:
            case KW_BIGINT:
            case KW_BINARY:
            case KW_BOOLEAN:
            case KW_BOTH:
            case KW_BUCKET:
            case KW_BUCKETS:
            case KW_BY:
            case KW_CASCADE:
            case KW_CHANGE:
            case KW_CLUSTER:
            case KW_CLUSTERED:
            case KW_CLUSTERSTATUS:
            case KW_COLLECTION:
            case KW_COLUMNS:
            case KW_COMMENT:
            case KW_COMPACT:
            case KW_COMPACTIONS:
            case KW_COMPUTE:
            case KW_CONCATENATE:
            case KW_CONTINUE:
            case KW_CREATE:
            case KW_CUBE:
            case KW_CURSOR:
            case KW_DATA:
            case KW_DATABASES:
            case KW_DATE:
            case KW_DATETIME:
            case KW_DBPROPERTIES:
            case KW_DECIMAL:
            case KW_DEFAULT:
            case KW_DEFERRED:
            case KW_DEFINED:
            case KW_DELETE:
            case KW_DELIMITED:
            case KW_DEPENDENCY:
            case KW_DESC:
            case KW_DESCRIBE:
            case KW_DIRECTORIES:
            case KW_DIRECTORY:
            case KW_DISABLE:
            case KW_DISTRIBUTE:
            case KW_DOUBLE:
            case KW_DROP:
            case KW_ELEM_TYPE:
            case KW_ENABLE:
            case KW_ESCAPED:
            case KW_EXCLUSIVE:
            case KW_EXISTS:
            case KW_EXPLAIN:
            case KW_EXPORT:
            case KW_EXTERNAL:
            case KW_FALSE:
            case KW_FETCH:
            case KW_FIELDS:
            case KW_FILE:
            case KW_FILEFORMAT:
            case KW_FIRST:
            case KW_FLOAT:
            case KW_FOR:
            case KW_FORMAT:
            case KW_FORMATTED:
            case KW_FULL:
            case KW_FUNCTIONS:
            case KW_GRANT:
            case KW_GROUP:
            case KW_GROUPING:
            case KW_HOLD_DDLTIME:
            case KW_IDXPROPERTIES:
            case KW_IGNORE:
            case KW_IMPORT:
            case KW_IN:
            case KW_INDEX:
            case KW_INDEXES:
            case KW_INNER:
            case KW_INPATH:
            case KW_INPUTDRIVER:
            case KW_INPUTFORMAT:
            case KW_INSERT:
            case KW_INT:
            case KW_INTERSECT:
            case KW_INTO:
            case KW_IS:
            case KW_ITEMS:
            case KW_JAR:
            case KW_KEYS:
            case KW_KEY_TYPE:
            case KW_LATERAL:
            case KW_LEFT:
            case KW_LIKE:
            case KW_LIMIT:
            case KW_LINES:
            case KW_LOAD:
            case KW_LOCAL:
            case KW_LOCATION:
            case KW_LOCK:
            case KW_LOCKS:
            case KW_LOGICAL:
            case KW_LONG:
            case KW_MAPJOIN:
            case KW_MATERIALIZED:
            case KW_MINUS:
            case KW_MSCK:
            case KW_NONE:
            case KW_NOSCAN:
            case KW_NO_DROP:
            case KW_NULL:
            case KW_OF:
            case KW_OFFLINE:
            case KW_OPTION:
            case KW_ORDER:
            case KW_OUT:
            case KW_OUTER:
            case KW_OUTPUTDRIVER:
            case KW_OUTPUTFORMAT:
            case KW_OVERWRITE:
            case KW_OWNER:
            case KW_PARTITION:
            case KW_PARTITIONED:
            case KW_PARTITIONS:
            case KW_PERCENT:
            case KW_PLUS:
            case KW_PRETTY:
            case KW_PRINCIPALS:
            case KW_PROCEDURE:
            case KW_PROTECTION:
            case KW_PURGE:
            case KW_RANGE:
            case KW_READ:
            case KW_READONLY:
            case KW_READS:
            case KW_REBUILD:
            case KW_RECORDREADER:
            case KW_RECORDWRITER:
            case KW_REGEXP:
            case KW_RENAME:
            case KW_REPAIR:
            case KW_REPLACE:
            case KW_RESTRICT:
            case KW_REVOKE:
            case KW_REWRITE:
            case KW_RIGHT:
            case KW_RLIKE:
            case KW_ROLE:
            case KW_ROLES:
            case KW_ROLLUP:
            case KW_ROW:
            case KW_ROWS:
            case KW_SCHEMA:
            case KW_SCHEMAS:
            case KW_SEMI:
            case KW_SERDE:
            case KW_SERDEPROPERTIES:
            case KW_SET:
            case KW_SETS:
            case KW_SHARED:
            case KW_SHOW:
            case KW_SHOW_DATABASE:
            case KW_SKEWED:
            case KW_SMALLINT:
            case KW_SORT:
            case KW_SORTED:
            case KW_SSL:
            case KW_STATISTICS:
            case KW_STORED:
            case KW_STREAMTABLE:
            case KW_STRING:
            case KW_STRUCT:
            case KW_TABLE:
            case KW_TABLES:
            case KW_TBLPROPERTIES:
            case KW_TEMPORARY:
            case KW_TERMINATED:
            case KW_TIMESTAMP:
            case KW_TINYINT:
            case KW_TO:
            case KW_TOUCH:
            case KW_TRANSACTIONS:
            case KW_TRIGGER:
            case KW_TRUE:
            case KW_TRUNCATE:
            case KW_UNARCHIVE:
            case KW_UNDO:
            case KW_UNION:
            case KW_UNIONTYPE:
            case KW_UNLOCK:
            case KW_UNSET:
            case KW_UNSIGNED:
            case KW_UPDATE:
            case KW_USE:
            case KW_USER:
            case KW_USING:
            case KW_UTC:
            case KW_UTCTIMESTAMP:
            case KW_VALUES:
            case KW_VALUE_TYPE:
            case KW_VIEW:
            case KW_WHILE:
            case KW_WITH:
                {
                alt184=1;
                }
                break;
            case StringLiteral:
                {
                alt184=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 184, 0, input);

                throw nvae;

            }

            switch (alt184) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1674:7: identifier
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_identifier_in_showStmtIdentifier9606);
                    identifier628=identifier();

                    state._fsp--;

                    adaptor.addChild(root_0, identifier628.getTree());

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1675:7: StringLiteral
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    StringLiteral629=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_showStmtIdentifier9614); 
                    StringLiteral629_tree = 
                    (CommonTree)adaptor.create(StringLiteral629)
                    ;
                    adaptor.addChild(root_0, StringLiteral629_tree);


                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "showStmtIdentifier"


    public static class tableComment_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tableComment"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1678:1: tableComment : KW_COMMENT comment= StringLiteral -> ^( TOK_TABLECOMMENT $comment) ;
    public final HiveParser.tableComment_return tableComment() throws RecognitionException {
        HiveParser.tableComment_return retval = new HiveParser.tableComment_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token comment=null;
        Token KW_COMMENT630=null;

        CommonTree comment_tree=null;
        CommonTree KW_COMMENT630_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");

         pushMsg("table's comment", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1681:5: ( KW_COMMENT comment= StringLiteral -> ^( TOK_TABLECOMMENT $comment) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1682:7: KW_COMMENT comment= StringLiteral
            {
            KW_COMMENT630=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_tableComment9647);  
            stream_KW_COMMENT.add(KW_COMMENT630);


            comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableComment9651);  
            stream_StringLiteral.add(comment);


            // AST REWRITE
            // elements: comment
            // token labels: comment
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1682:41: -> ^( TOK_TABLECOMMENT $comment)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1682:44: ^( TOK_TABLECOMMENT $comment)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABLECOMMENT, "TOK_TABLECOMMENT")
                , root_1);

                adaptor.addChild(root_1, stream_comment.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "tableComment"


    public static class tablePartition_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tablePartition"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1685:1: tablePartition : KW_PARTITIONED KW_BY LPAREN columnNameTypeList RPAREN -> ^( TOK_TABLEPARTCOLS columnNameTypeList ) ;
    public final HiveParser.tablePartition_return tablePartition() throws RecognitionException {
        HiveParser.tablePartition_return retval = new HiveParser.tablePartition_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_PARTITIONED631=null;
        Token KW_BY632=null;
        Token LPAREN633=null;
        Token RPAREN635=null;
        HiveParser.columnNameTypeList_return columnNameTypeList634 =null;


        CommonTree KW_PARTITIONED631_tree=null;
        CommonTree KW_BY632_tree=null;
        CommonTree LPAREN633_tree=null;
        CommonTree RPAREN635_tree=null;
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_KW_PARTITIONED=new RewriteRuleTokenStream(adaptor,"token KW_PARTITIONED");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleTokenStream stream_KW_BY=new RewriteRuleTokenStream(adaptor,"token KW_BY");
        RewriteRuleSubtreeStream stream_columnNameTypeList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameTypeList");
         pushMsg("table partition specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1688:5: ( KW_PARTITIONED KW_BY LPAREN columnNameTypeList RPAREN -> ^( TOK_TABLEPARTCOLS columnNameTypeList ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1688:7: KW_PARTITIONED KW_BY LPAREN columnNameTypeList RPAREN
            {
            KW_PARTITIONED631=(Token)match(input,KW_PARTITIONED,FOLLOW_KW_PARTITIONED_in_tablePartition9688);  
            stream_KW_PARTITIONED.add(KW_PARTITIONED631);


            KW_BY632=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tablePartition9690);  
            stream_KW_BY.add(KW_BY632);


            LPAREN633=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_tablePartition9692);  
            stream_LPAREN.add(LPAREN633);


            pushFollow(FOLLOW_columnNameTypeList_in_tablePartition9694);
            columnNameTypeList634=columnNameTypeList();

            state._fsp--;

            stream_columnNameTypeList.add(columnNameTypeList634.getTree());

            RPAREN635=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_tablePartition9696);  
            stream_RPAREN.add(RPAREN635);


            // AST REWRITE
            // elements: columnNameTypeList
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1689:5: -> ^( TOK_TABLEPARTCOLS columnNameTypeList )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1689:8: ^( TOK_TABLEPARTCOLS columnNameTypeList )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABLEPARTCOLS, "TOK_TABLEPARTCOLS")
                , root_1);

                adaptor.addChild(root_1, stream_columnNameTypeList.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "tablePartition"


    public static class tableBuckets_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tableBuckets"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1692:1: tableBuckets : KW_CLUSTERED KW_BY LPAREN bucketCols= columnNameList RPAREN ( KW_SORTED KW_BY LPAREN sortCols= columnNameOrderList RPAREN )? KW_INTO num= Number KW_BUCKETS -> ^( TOK_ALTERTABLE_BUCKETS $bucketCols ( $sortCols)? $num) ;
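    // Illustrative note (not ANTLR-generated): the rule above is expected to accept a
    // clause such as
    //     CLUSTERED BY (user_id) SORTED BY (ts) INTO 32 BUCKETS
    // where the SORTED BY part is optional, producing
    // ^(TOK_ALTERTABLE_BUCKETS $bucketCols ($sortCols)? $num).
    // The column names and bucket count are made-up examples.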
    public final HiveParser.tableBuckets_return tableBuckets() throws RecognitionException {
        HiveParser.tableBuckets_return retval = new HiveParser.tableBuckets_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token num=null;
        Token KW_CLUSTERED636=null;
        Token KW_BY637=null;
        Token LPAREN638=null;
        Token RPAREN639=null;
        Token KW_SORTED640=null;
        Token KW_BY641=null;
        Token LPAREN642=null;
        Token RPAREN643=null;
        Token KW_INTO644=null;
        Token KW_BUCKETS645=null;
        HiveParser.columnNameList_return bucketCols =null;

        HiveParser.columnNameOrderList_return sortCols =null;


        CommonTree num_tree=null;
        CommonTree KW_CLUSTERED636_tree=null;
        CommonTree KW_BY637_tree=null;
        CommonTree LPAREN638_tree=null;
        CommonTree RPAREN639_tree=null;
        CommonTree KW_SORTED640_tree=null;
        CommonTree KW_BY641_tree=null;
        CommonTree LPAREN642_tree=null;
        CommonTree RPAREN643_tree=null;
        CommonTree KW_INTO644_tree=null;
        CommonTree KW_BUCKETS645_tree=null;
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_KW_INTO=new RewriteRuleTokenStream(adaptor,"token KW_INTO");
        RewriteRuleTokenStream stream_Number=new RewriteRuleTokenStream(adaptor,"token Number");
        RewriteRuleTokenStream stream_KW_BUCKETS=new RewriteRuleTokenStream(adaptor,"token KW_BUCKETS");
        RewriteRuleTokenStream stream_KW_CLUSTERED=new RewriteRuleTokenStream(adaptor,"token KW_CLUSTERED");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleTokenStream stream_KW_BY=new RewriteRuleTokenStream(adaptor,"token KW_BY");
        RewriteRuleTokenStream stream_KW_SORTED=new RewriteRuleTokenStream(adaptor,"token KW_SORTED");
        RewriteRuleSubtreeStream stream_columnNameList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameList");
        RewriteRuleSubtreeStream stream_columnNameOrderList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameOrderList");
         pushMsg("table buckets specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1695:5: ( KW_CLUSTERED KW_BY LPAREN bucketCols= columnNameList RPAREN ( KW_SORTED KW_BY LPAREN sortCols= columnNameOrderList RPAREN )? KW_INTO num= Number KW_BUCKETS -> ^( TOK_ALTERTABLE_BUCKETS $bucketCols ( $sortCols)? $num) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1696:7: KW_CLUSTERED KW_BY LPAREN bucketCols= columnNameList RPAREN ( KW_SORTED KW_BY LPAREN sortCols= columnNameOrderList RPAREN )? KW_INTO num= Number KW_BUCKETS
            {
            KW_CLUSTERED636=(Token)match(input,KW_CLUSTERED,FOLLOW_KW_CLUSTERED_in_tableBuckets9741);  
            stream_KW_CLUSTERED.add(KW_CLUSTERED636);


            KW_BY637=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableBuckets9743);  
            stream_KW_BY.add(KW_BY637);


            LPAREN638=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_tableBuckets9745);  
            stream_LPAREN.add(LPAREN638);


            pushFollow(FOLLOW_columnNameList_in_tableBuckets9749);
            bucketCols=columnNameList();

            state._fsp--;

            stream_columnNameList.add(bucketCols.getTree());

            RPAREN639=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_tableBuckets9751);  
            stream_RPAREN.add(RPAREN639);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1696:66: ( KW_SORTED KW_BY LPAREN sortCols= columnNameOrderList RPAREN )?
            int alt185=2;
            switch ( input.LA(1) ) {
                case KW_SORTED:
                    {
                    alt185=1;
                    }
                    break;
            }

            switch (alt185) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1696:67: KW_SORTED KW_BY LPAREN sortCols= columnNameOrderList RPAREN
                    {
                    KW_SORTED640=(Token)match(input,KW_SORTED,FOLLOW_KW_SORTED_in_tableBuckets9754);  
                    stream_KW_SORTED.add(KW_SORTED640);


                    KW_BY641=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableBuckets9756);  
                    stream_KW_BY.add(KW_BY641);


                    LPAREN642=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_tableBuckets9758);  
                    stream_LPAREN.add(LPAREN642);


                    pushFollow(FOLLOW_columnNameOrderList_in_tableBuckets9762);
                    sortCols=columnNameOrderList();

                    state._fsp--;

                    stream_columnNameOrderList.add(sortCols.getTree());

                    RPAREN643=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_tableBuckets9764);  
                    stream_RPAREN.add(RPAREN643);


                    }
                    break;

            }


            KW_INTO644=(Token)match(input,KW_INTO,FOLLOW_KW_INTO_in_tableBuckets9768);  
            stream_KW_INTO.add(KW_INTO644);


            num=(Token)match(input,Number,FOLLOW_Number_in_tableBuckets9772);  
            stream_Number.add(num);


            KW_BUCKETS645=(Token)match(input,KW_BUCKETS,FOLLOW_KW_BUCKETS_in_tableBuckets9774);  
            stream_KW_BUCKETS.add(KW_BUCKETS645);


            // AST REWRITE
            // elements: bucketCols, sortCols, num
            // token labels: num
            // rule labels: sortCols, retval, bucketCols
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_num=new RewriteRuleTokenStream(adaptor,"token num",num);
            RewriteRuleSubtreeStream stream_sortCols=new RewriteRuleSubtreeStream(adaptor,"rule sortCols",sortCols!=null?sortCols.tree:null);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_bucketCols=new RewriteRuleSubtreeStream(adaptor,"rule bucketCols",bucketCols!=null?bucketCols.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1697:5: -> ^( TOK_ALTERTABLE_BUCKETS $bucketCols ( $sortCols)? $num)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1697:8: ^( TOK_ALTERTABLE_BUCKETS $bucketCols ( $sortCols)? $num)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_ALTERTABLE_BUCKETS, "TOK_ALTERTABLE_BUCKETS")
                , root_1);

                adaptor.addChild(root_1, stream_bucketCols.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1697:46: ( $sortCols)?
                if ( stream_sortCols.hasNext() ) {
                    adaptor.addChild(root_1, stream_sortCols.nextTree());

                }
                stream_sortCols.reset();

                adaptor.addChild(root_1, stream_num.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "tableBuckets"


    public static class tableSkewed_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tableSkewed"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1700:1: tableSkewed : KW_SKEWED KW_BY LPAREN skewedCols= columnNameList RPAREN KW_ON LPAREN (skewedValues= skewedValueElement ) RPAREN ( storedAsDirs )? -> ^( TOK_TABLESKEWED $skewedCols $skewedValues ( storedAsDirs )? ) ;
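    // Illustrative note (not ANTLR-generated): the rule above is expected to accept a
    // clause such as
    //     SKEWED BY (key) ON ('a', 'b') STORED AS DIRECTORIES
    // where STORED AS DIRECTORIES is optional (see the KW_STORED / KW_AS / KW_DIRECTORIES
    // lookahead below), producing ^(TOK_TABLESKEWED $skewedCols $skewedValues storedAsDirs?).
    // The column name and skewed values are made-up examples; the exact value syntax is
    // defined by the skewedValueElement sub-rule, not by this method.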
    public final HiveParser.tableSkewed_return tableSkewed() throws RecognitionException {
        HiveParser.tableSkewed_return retval = new HiveParser.tableSkewed_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_SKEWED646=null;
        Token KW_BY647=null;
        Token LPAREN648=null;
        Token RPAREN649=null;
        Token KW_ON650=null;
        Token LPAREN651=null;
        Token RPAREN652=null;
        HiveParser.columnNameList_return skewedCols =null;

        HiveParser.skewedValueElement_return skewedValues =null;

        HiveParser.storedAsDirs_return storedAsDirs653 =null;


        CommonTree KW_SKEWED646_tree=null;
        CommonTree KW_BY647_tree=null;
        CommonTree LPAREN648_tree=null;
        CommonTree RPAREN649_tree=null;
        CommonTree KW_ON650_tree=null;
        CommonTree LPAREN651_tree=null;
        CommonTree RPAREN652_tree=null;
        RewriteRuleTokenStream stream_KW_SKEWED=new RewriteRuleTokenStream(adaptor,"token KW_SKEWED");
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleTokenStream stream_KW_ON=new RewriteRuleTokenStream(adaptor,"token KW_ON");
        RewriteRuleTokenStream stream_KW_BY=new RewriteRuleTokenStream(adaptor,"token KW_BY");
        RewriteRuleSubtreeStream stream_columnNameList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameList");
        RewriteRuleSubtreeStream stream_storedAsDirs=new RewriteRuleSubtreeStream(adaptor,"rule storedAsDirs");
        RewriteRuleSubtreeStream stream_skewedValueElement=new RewriteRuleSubtreeStream(adaptor,"rule skewedValueElement");
         pushMsg("table skewed specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1703:5: ( KW_SKEWED KW_BY LPAREN skewedCols= columnNameList RPAREN KW_ON LPAREN (skewedValues= skewedValueElement ) RPAREN ( storedAsDirs )? -> ^( TOK_TABLESKEWED $skewedCols $skewedValues ( storedAsDirs )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1704:6: KW_SKEWED KW_BY LPAREN skewedCols= columnNameList RPAREN KW_ON LPAREN (skewedValues= skewedValueElement ) RPAREN ( storedAsDirs )?
            {
            KW_SKEWED646=(Token)match(input,KW_SKEWED,FOLLOW_KW_SKEWED_in_tableSkewed9826);  
            stream_KW_SKEWED.add(KW_SKEWED646);


            KW_BY647=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableSkewed9828);  
            stream_KW_BY.add(KW_BY647);


            LPAREN648=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_tableSkewed9830);  
            stream_LPAREN.add(LPAREN648);


            pushFollow(FOLLOW_columnNameList_in_tableSkewed9834);
            skewedCols=columnNameList();

            state._fsp--;

            stream_columnNameList.add(skewedCols.getTree());

            RPAREN649=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_tableSkewed9836);  
            stream_RPAREN.add(RPAREN649);


            KW_ON650=(Token)match(input,KW_ON,FOLLOW_KW_ON_in_tableSkewed9838);  
            stream_KW_ON.add(KW_ON650);


            LPAREN651=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_tableSkewed9840);  
            stream_LPAREN.add(LPAREN651);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1704:75: (skewedValues= skewedValueElement )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1704:76: skewedValues= skewedValueElement
            {
            pushFollow(FOLLOW_skewedValueElement_in_tableSkewed9845);
            skewedValues=skewedValueElement();

            state._fsp--;

            stream_skewedValueElement.add(skewedValues.getTree());

            }


            RPAREN652=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_tableSkewed9848);  
            stream_RPAREN.add(RPAREN652);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1704:116: ( storedAsDirs )?
            int alt186=2;
            switch ( input.LA(1) ) {
                case KW_STORED:
                    {
                    switch ( input.LA(2) ) {
                        case KW_AS:
                            {
                            switch ( input.LA(3) ) {
                                case KW_DIRECTORIES:
                                    {
                                    alt186=1;
                                    }
                                    break;
                            }

                            }
                            break;
                    }

                    }
                    break;
            }

            switch (alt186) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1704:117: storedAsDirs
                    {
                    pushFollow(FOLLOW_storedAsDirs_in_tableSkewed9851);
                    storedAsDirs653=storedAsDirs();

                    state._fsp--;

                    stream_storedAsDirs.add(storedAsDirs653.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: storedAsDirs, skewedValues, skewedCols
            // token labels: 
            // rule labels: retval, skewedValues, skewedCols
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_skewedValues=new RewriteRuleSubtreeStream(adaptor,"rule skewedValues",skewedValues!=null?skewedValues.tree:null);
            RewriteRuleSubtreeStream stream_skewedCols=new RewriteRuleSubtreeStream(adaptor,"rule skewedCols",skewedCols!=null?skewedCols.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1705:5: -> ^( TOK_TABLESKEWED $skewedCols $skewedValues ( storedAsDirs )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1705:8: ^( TOK_TABLESKEWED $skewedCols $skewedValues ( storedAsDirs )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABLESKEWED, "TOK_TABLESKEWED")
                , root_1);

                adaptor.addChild(root_1, stream_skewedCols.nextTree());

                adaptor.addChild(root_1, stream_skewedValues.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1705:52: ( storedAsDirs )?
                if ( stream_storedAsDirs.hasNext() ) {
                    adaptor.addChild(root_1, stream_storedAsDirs.nextTree());

                }
                stream_storedAsDirs.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "tableSkewed"


    public static class rowFormat_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "rowFormat"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1708:1: rowFormat : ( rowFormatSerde -> ^( TOK_SERDE rowFormatSerde ) | rowFormatDelimited -> ^( TOK_SERDE rowFormatDelimited ) | -> ^( TOK_SERDE ) );
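    // Illustrative note (not ANTLR-generated): the rule above wraps an optional row format
    // specification in a TOK_SERDE node:
    //     ROW FORMAT SERDE ...       -> ^(TOK_SERDE rowFormatSerde)
    //     ROW FORMAT DELIMITED ...   -> ^(TOK_SERDE rowFormatDelimited)
    //     (clause absent)            -> ^(TOK_SERDE)
    // The empty alternative is chosen when the lookahead token is one of the follow
    // tokens listed in the switch below (EOF, KW_SELECT, KW_USING, RPAREN, ...).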
    public final HiveParser.rowFormat_return rowFormat() throws RecognitionException {
        HiveParser.rowFormat_return retval = new HiveParser.rowFormat_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.rowFormatSerde_return rowFormatSerde654 =null;

        HiveParser.rowFormatDelimited_return rowFormatDelimited655 =null;


        RewriteRuleSubtreeStream stream_rowFormatSerde=new RewriteRuleSubtreeStream(adaptor,"rule rowFormatSerde");
        RewriteRuleSubtreeStream stream_rowFormatDelimited=new RewriteRuleSubtreeStream(adaptor,"rule rowFormatDelimited");
         pushMsg("serde specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1711:5: ( rowFormatSerde -> ^( TOK_SERDE rowFormatSerde ) | rowFormatDelimited -> ^( TOK_SERDE rowFormatDelimited ) | -> ^( TOK_SERDE ) )
            int alt187=3;
            switch ( input.LA(1) ) {
            case KW_ROW:
                {
                switch ( input.LA(2) ) {
                case KW_FORMAT:
                    {
                    switch ( input.LA(3) ) {
                    case KW_SERDE:
                        {
                        alt187=1;
                        }
                        break;
                    case KW_DELIMITED:
                        {
                        alt187=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 187, 23, input);

                        throw nvae;

                    }

                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 187, 1, input);

                    throw nvae;

                }

                }
                break;
            case EOF:
            case KW_CLUSTER:
            case KW_DISTRIBUTE:
            case KW_FROM:
            case KW_GROUP:
            case KW_HAVING:
            case KW_INSERT:
            case KW_LATERAL:
            case KW_LIMIT:
            case KW_MAP:
            case KW_ORDER:
            case KW_RECORDREADER:
            case KW_RECORDWRITER:
            case KW_REDUCE:
            case KW_SELECT:
            case KW_SORT:
            case KW_UNION:
            case KW_USING:
            case KW_WHERE:
            case KW_WINDOW:
            case RPAREN:
                {
                alt187=3;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 187, 0, input);

                throw nvae;

            }

            switch (alt187) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1711:7: rowFormatSerde
                    {
                    pushFollow(FOLLOW_rowFormatSerde_in_rowFormat9899);
                    rowFormatSerde654=rowFormatSerde();

                    state._fsp--;

                    stream_rowFormatSerde.add(rowFormatSerde654.getTree());

                    // AST REWRITE
                    // elements: rowFormatSerde
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1711:22: -> ^( TOK_SERDE rowFormatSerde )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1711:25: ^( TOK_SERDE rowFormatSerde )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_SERDE, "TOK_SERDE")
                        , root_1);

                        adaptor.addChild(root_1, stream_rowFormatSerde.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1712:7: rowFormatDelimited
                    {
                    pushFollow(FOLLOW_rowFormatDelimited_in_rowFormat9915);
                    rowFormatDelimited655=rowFormatDelimited();

                    state._fsp--;

                    stream_rowFormatDelimited.add(rowFormatDelimited655.getTree());

                    // AST REWRITE
                    // elements: rowFormatDelimited
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1712:26: -> ^( TOK_SERDE rowFormatDelimited )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1712:29: ^( TOK_SERDE rowFormatDelimited )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_SERDE, "TOK_SERDE")
                        , root_1);

                        adaptor.addChild(root_1, stream_rowFormatDelimited.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1713:9: 
                    {
                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1713:9: -> ^( TOK_SERDE )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1713:12: ^( TOK_SERDE )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_SERDE, "TOK_SERDE")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "rowFormat"


    public static class recordReader_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "recordReader"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1716:1: recordReader : ( KW_RECORDREADER StringLiteral -> ^( TOK_RECORDREADER StringLiteral ) | -> ^( TOK_RECORDREADER ) );
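    // Illustrative note (not ANTLR-generated): the rule above is expected to accept an
    // optional clause such as
    //     RECORDREADER 'com.example.MyRecordReader'
    // (the class name is a placeholder, not taken from this file), producing
    // ^(TOK_RECORDREADER StringLiteral); when the clause is absent it produces an
    // empty ^(TOK_RECORDREADER).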
    public final HiveParser.recordReader_return recordReader() throws RecognitionException {
        HiveParser.recordReader_return retval = new HiveParser.recordReader_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_RECORDREADER656=null;
        Token StringLiteral657=null;

        CommonTree KW_RECORDREADER656_tree=null;
        CommonTree StringLiteral657_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_RECORDREADER=new RewriteRuleTokenStream(adaptor,"token KW_RECORDREADER");

         pushMsg("record reader specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1719:5: ( KW_RECORDREADER StringLiteral -> ^( TOK_RECORDREADER StringLiteral ) | -> ^( TOK_RECORDREADER ) )
            int alt188=2;
            switch ( input.LA(1) ) {
            case KW_RECORDREADER:
                {
                alt188=1;
                }
                break;
            case EOF:
            case KW_CLUSTER:
            case KW_DISTRIBUTE:
            case KW_FROM:
            case KW_GROUP:
            case KW_HAVING:
            case KW_INSERT:
            case KW_LATERAL:
            case KW_LIMIT:
            case KW_MAP:
            case KW_ORDER:
            case KW_REDUCE:
            case KW_SELECT:
            case KW_SORT:
            case KW_UNION:
            case KW_WHERE:
            case KW_WINDOW:
            case RPAREN:
                {
                alt188=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 188, 0, input);

                throw nvae;

            }

            switch (alt188) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1719:7: KW_RECORDREADER StringLiteral
                    {
                    KW_RECORDREADER656=(Token)match(input,KW_RECORDREADER,FOLLOW_KW_RECORDREADER_in_recordReader9964);  
                    stream_KW_RECORDREADER.add(KW_RECORDREADER656);


                    StringLiteral657=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_recordReader9966);  
                    stream_StringLiteral.add(StringLiteral657);


                    // AST REWRITE
                    // elements: StringLiteral
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1719:37: -> ^( TOK_RECORDREADER StringLiteral )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1719:40: ^( TOK_RECORDREADER StringLiteral )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_RECORDREADER, "TOK_RECORDREADER")
                        , root_1);

                        adaptor.addChild(root_1, 
                        stream_StringLiteral.nextNode()
                        );

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1720:9: 
                    {
                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1720:9: -> ^( TOK_RECORDREADER )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1720:12: ^( TOK_RECORDREADER )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_RECORDREADER, "TOK_RECORDREADER")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "recordReader"


    public static class recordWriter_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "recordWriter"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1723:1: recordWriter : ( KW_RECORDWRITER StringLiteral -> ^( TOK_RECORDWRITER StringLiteral ) | -> ^( TOK_RECORDWRITER ) );
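    // Illustrative note (not ANTLR-generated): the rule above is expected to accept an
    // optional clause such as
    //     RECORDWRITER 'com.example.MyRecordWriter'
    // (the class name is a placeholder, not taken from this file), producing
    // ^(TOK_RECORDWRITER StringLiteral); per the switch below, the empty alternative
    // fires only when the next token is KW_USING, yielding an empty ^(TOK_RECORDWRITER).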
    public final HiveParser.recordWriter_return recordWriter() throws RecognitionException {
        HiveParser.recordWriter_return retval = new HiveParser.recordWriter_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_RECORDWRITER658=null;
        Token StringLiteral659=null;

        CommonTree KW_RECORDWRITER658_tree=null;
        CommonTree StringLiteral659_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_RECORDWRITER=new RewriteRuleTokenStream(adaptor,"token KW_RECORDWRITER");

         pushMsg("record writer specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1726:5: ( KW_RECORDWRITER StringLiteral -> ^( TOK_RECORDWRITER StringLiteral ) | -> ^( TOK_RECORDWRITER ) )
            int alt189=2;
            switch ( input.LA(1) ) {
            case KW_RECORDWRITER:
                {
                alt189=1;
                }
                break;
            case KW_USING:
                {
                alt189=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 189, 0, input);

                throw nvae;

            }

            switch (alt189) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1726:7: KW_RECORDWRITER StringLiteral
                    {
                    KW_RECORDWRITER658=(Token)match(input,KW_RECORDWRITER,FOLLOW_KW_RECORDWRITER_in_recordWriter10015);  
                    stream_KW_RECORDWRITER.add(KW_RECORDWRITER658);


                    StringLiteral659=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_recordWriter10017);  
                    stream_StringLiteral.add(StringLiteral659);


                    // AST REWRITE
                    // elements: StringLiteral
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1726:37: -> ^( TOK_RECORDWRITER StringLiteral )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1726:40: ^( TOK_RECORDWRITER StringLiteral )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_RECORDWRITER, "TOK_RECORDWRITER")
                        , root_1);

                        adaptor.addChild(root_1, 
                        stream_StringLiteral.nextNode()
                        );

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1727:9: 
                    {
                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1727:9: -> ^( TOK_RECORDWRITER )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1727:12: ^( TOK_RECORDWRITER )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_RECORDWRITER, "TOK_RECORDWRITER")
                        , root_1);

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "recordWriter"


    public static class rowFormatSerde_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "rowFormatSerde"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1730:1: rowFormatSerde : KW_ROW KW_FORMAT KW_SERDE name= StringLiteral ( KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties )? -> ^( TOK_SERDENAME $name ( $serdeprops)? ) ;
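    // Illustrative note (not ANTLR-generated): the rule above is expected to accept a
    // clause such as
    //     ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
    //       WITH SERDEPROPERTIES ('field.delim' = ',')
    // where the WITH SERDEPROPERTIES part is optional, producing
    // ^(TOK_SERDENAME $name ($serdeprops)?). The SerDe class and property shown here
    // are examples, not values taken from this file.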
    public final HiveParser.rowFormatSerde_return rowFormatSerde() throws RecognitionException {
        HiveParser.rowFormatSerde_return retval = new HiveParser.rowFormatSerde_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token name=null;
        Token KW_ROW660=null;
        Token KW_FORMAT661=null;
        Token KW_SERDE662=null;
        Token KW_WITH663=null;
        Token KW_SERDEPROPERTIES664=null;
        HiveParser.tableProperties_return serdeprops =null;


        CommonTree name_tree=null;
        CommonTree KW_ROW660_tree=null;
        CommonTree KW_FORMAT661_tree=null;
        CommonTree KW_SERDE662_tree=null;
        CommonTree KW_WITH663_tree=null;
        CommonTree KW_SERDEPROPERTIES664_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_ROW=new RewriteRuleTokenStream(adaptor,"token KW_ROW");
        RewriteRuleTokenStream stream_KW_FORMAT=new RewriteRuleTokenStream(adaptor,"token KW_FORMAT");
        RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
        RewriteRuleTokenStream stream_KW_SERDE=new RewriteRuleTokenStream(adaptor,"token KW_SERDE");
        RewriteRuleTokenStream stream_KW_SERDEPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_SERDEPROPERTIES");
        RewriteRuleSubtreeStream stream_tableProperties=new RewriteRuleSubtreeStream(adaptor,"rule tableProperties");
         pushMsg("serde format specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1733:5: ( KW_ROW KW_FORMAT KW_SERDE name= StringLiteral ( KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties )? -> ^( TOK_SERDENAME $name ( $serdeprops)? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1733:7: KW_ROW KW_FORMAT KW_SERDE name= StringLiteral ( KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties )?
            {
            KW_ROW660=(Token)match(input,KW_ROW,FOLLOW_KW_ROW_in_rowFormatSerde10066);  
            stream_KW_ROW.add(KW_ROW660);


            KW_FORMAT661=(Token)match(input,KW_FORMAT,FOLLOW_KW_FORMAT_in_rowFormatSerde10068);  
            stream_KW_FORMAT.add(KW_FORMAT661);


            KW_SERDE662=(Token)match(input,KW_SERDE,FOLLOW_KW_SERDE_in_rowFormatSerde10070);  
            stream_KW_SERDE.add(KW_SERDE662);


            name=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_rowFormatSerde10074);  
            stream_StringLiteral.add(name);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1733:52: ( KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties )?
            int alt190=2;
            switch ( input.LA(1) ) {
                case KW_WITH:
                    {
                    alt190=1;
                    }
                    break;
            }

            switch (alt190) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1733:53: KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties
                    {
                    KW_WITH663=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_rowFormatSerde10077);  
                    stream_KW_WITH.add(KW_WITH663);


                    KW_SERDEPROPERTIES664=(Token)match(input,KW_SERDEPROPERTIES,FOLLOW_KW_SERDEPROPERTIES_in_rowFormatSerde10079);  
                    stream_KW_SERDEPROPERTIES.add(KW_SERDEPROPERTIES664);


                    pushFollow(FOLLOW_tableProperties_in_rowFormatSerde10083);
                    serdeprops=tableProperties();

                    state._fsp--;

                    stream_tableProperties.add(serdeprops.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: name, serdeprops
            // token labels: name
            // rule labels: serdeprops, retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_name=new RewriteRuleTokenStream(adaptor,"token name",name);
            RewriteRuleSubtreeStream stream_serdeprops=new RewriteRuleSubtreeStream(adaptor,"rule serdeprops",serdeprops!=null?serdeprops.tree:null);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1734:5: -> ^( TOK_SERDENAME $name ( $serdeprops)? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1734:8: ^( TOK_SERDENAME $name ( $serdeprops)? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SERDENAME, "TOK_SERDENAME")
                , root_1);

                adaptor.addChild(root_1, stream_name.nextNode());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1734:31: ( $serdeprops)?
                if ( stream_serdeprops.hasNext() ) {
                    adaptor.addChild(root_1, stream_serdeprops.nextTree());

                }
                stream_serdeprops.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "rowFormatSerde"


    public static class rowFormatDelimited_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "rowFormatDelimited"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1737:1: rowFormatDelimited : KW_ROW KW_FORMAT KW_DELIMITED ( tableRowFormatFieldIdentifier )? ( tableRowFormatCollItemsIdentifier )? ( tableRowFormatMapKeysIdentifier )? ( tableRowFormatLinesIdentifier )? ( tableRowNullFormat )? -> ^( TOK_SERDEPROPS ( tableRowFormatFieldIdentifier )? ( tableRowFormatCollItemsIdentifier )? ( tableRowFormatMapKeysIdentifier )? ( tableRowFormatLinesIdentifier )? ( tableRowNullFormat )? ) ;
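    // Illustrative note (not ANTLR-generated): the rule above is expected to accept a
    // clause along the lines of
    //     ROW FORMAT DELIMITED
    //       FIELDS TERMINATED BY ','
    //       COLLECTION ITEMS TERMINATED BY '|'
    //       MAP KEYS TERMINATED BY ':'
    //       LINES TERMINATED BY '\n'
    //       NULL DEFINED AS 'N/A'
    // with every sub-clause optional, producing a ^(TOK_SERDEPROPS ...) subtree with one
    // child per sub-clause actually present. The delimiter values are made-up examples,
    // and the exact sub-clause syntax is defined by the referenced sub-rules
    // (tableRowFormatFieldIdentifier, tableRowNullFormat, etc.), not by this method.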
    public final HiveParser.rowFormatDelimited_return rowFormatDelimited() throws RecognitionException {
        HiveParser.rowFormatDelimited_return retval = new HiveParser.rowFormatDelimited_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_ROW665=null;
        Token KW_FORMAT666=null;
        Token KW_DELIMITED667=null;
        HiveParser.tableRowFormatFieldIdentifier_return tableRowFormatFieldIdentifier668 =null;

        HiveParser.tableRowFormatCollItemsIdentifier_return tableRowFormatCollItemsIdentifier669 =null;

        HiveParser.tableRowFormatMapKeysIdentifier_return tableRowFormatMapKeysIdentifier670 =null;

        HiveParser.tableRowFormatLinesIdentifier_return tableRowFormatLinesIdentifier671 =null;

        HiveParser.tableRowNullFormat_return tableRowNullFormat672 =null;


        CommonTree KW_ROW665_tree=null;
        CommonTree KW_FORMAT666_tree=null;
        CommonTree KW_DELIMITED667_tree=null;
        RewriteRuleTokenStream stream_KW_DELIMITED=new RewriteRuleTokenStream(adaptor,"token KW_DELIMITED");
        RewriteRuleTokenStream stream_KW_ROW=new RewriteRuleTokenStream(adaptor,"token KW_ROW");
        RewriteRuleTokenStream stream_KW_FORMAT=new RewriteRuleTokenStream(adaptor,"token KW_FORMAT");
        RewriteRuleSubtreeStream stream_tableRowFormatMapKeysIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule tableRowFormatMapKeysIdentifier");
        RewriteRuleSubtreeStream stream_tableRowFormatFieldIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule tableRowFormatFieldIdentifier");
        RewriteRuleSubtreeStream stream_tableRowFormatCollItemsIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule tableRowFormatCollItemsIdentifier");
        RewriteRuleSubtreeStream stream_tableRowFormatLinesIdentifier=new RewriteRuleSubtreeStream(adaptor,"rule tableRowFormatLinesIdentifier");
        RewriteRuleSubtreeStream stream_tableRowNullFormat=new RewriteRuleSubtreeStream(adaptor,"rule tableRowNullFormat");
         pushMsg("serde properties specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1740:5: ( KW_ROW KW_FORMAT KW_DELIMITED ( tableRowFormatFieldIdentifier )? ( tableRowFormatCollItemsIdentifier )? ( tableRowFormatMapKeysIdentifier )? ( tableRowFormatLinesIdentifier )? ( tableRowNullFormat )? -> ^( TOK_SERDEPROPS ( tableRowFormatFieldIdentifier )? ( tableRowFormatCollItemsIdentifier )? ( tableRowFormatMapKeysIdentifier )? ( tableRowFormatLinesIdentifier )? ( tableRowNullFormat )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:7: KW_ROW KW_FORMAT KW_DELIMITED ( tableRowFormatFieldIdentifier )? ( tableRowFormatCollItemsIdentifier )? ( tableRowFormatMapKeysIdentifier )? ( tableRowFormatLinesIdentifier )? ( tableRowNullFormat )?
            {
            KW_ROW665=(Token)match(input,KW_ROW,FOLLOW_KW_ROW_in_rowFormatDelimited10135);  
            stream_KW_ROW.add(KW_ROW665);


            KW_FORMAT666=(Token)match(input,KW_FORMAT,FOLLOW_KW_FORMAT_in_rowFormatDelimited10137);  
            stream_KW_FORMAT.add(KW_FORMAT666);


            KW_DELIMITED667=(Token)match(input,KW_DELIMITED,FOLLOW_KW_DELIMITED_in_rowFormatDelimited10139);  
            stream_KW_DELIMITED.add(KW_DELIMITED667);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:37: ( tableRowFormatFieldIdentifier )?
            int alt191=2;
            switch ( input.LA(1) ) {
                case KW_FIELDS:
                    {
                    alt191=1;
                    }
                    break;
            }

            switch (alt191) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:37: tableRowFormatFieldIdentifier
                    {
                    pushFollow(FOLLOW_tableRowFormatFieldIdentifier_in_rowFormatDelimited10141);
                    tableRowFormatFieldIdentifier668=tableRowFormatFieldIdentifier();

                    state._fsp--;

                    stream_tableRowFormatFieldIdentifier.add(tableRowFormatFieldIdentifier668.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:68: ( tableRowFormatCollItemsIdentifier )?
            int alt192=2;
            switch ( input.LA(1) ) {
                case KW_COLLECTION:
                    {
                    alt192=1;
                    }
                    break;
            }

            switch (alt192) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:68: tableRowFormatCollItemsIdentifier
                    {
                    pushFollow(FOLLOW_tableRowFormatCollItemsIdentifier_in_rowFormatDelimited10144);
                    tableRowFormatCollItemsIdentifier669=tableRowFormatCollItemsIdentifier();

                    state._fsp--;

                    stream_tableRowFormatCollItemsIdentifier.add(tableRowFormatCollItemsIdentifier669.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:103: ( tableRowFormatMapKeysIdentifier )?
            int alt193=2;
            alt193 = dfa193.predict(input);
            switch (alt193) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:103: tableRowFormatMapKeysIdentifier
                    {
                    pushFollow(FOLLOW_tableRowFormatMapKeysIdentifier_in_rowFormatDelimited10147);
                    tableRowFormatMapKeysIdentifier670=tableRowFormatMapKeysIdentifier();

                    state._fsp--;

                    stream_tableRowFormatMapKeysIdentifier.add(tableRowFormatMapKeysIdentifier670.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:136: ( tableRowFormatLinesIdentifier )?
            int alt194=2;
            switch ( input.LA(1) ) {
                case KW_LINES:
                    {
                    alt194=1;
                    }
                    break;
            }

            switch (alt194) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:136: tableRowFormatLinesIdentifier
                    {
                    pushFollow(FOLLOW_tableRowFormatLinesIdentifier_in_rowFormatDelimited10150);
                    tableRowFormatLinesIdentifier671=tableRowFormatLinesIdentifier();

                    state._fsp--;

                    stream_tableRowFormatLinesIdentifier.add(tableRowFormatLinesIdentifier671.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:167: ( tableRowNullFormat )?
            int alt195=2;
            switch ( input.LA(1) ) {
                case KW_NULL:
                    {
                    alt195=1;
                    }
                    break;
            }

            switch (alt195) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1741:167: tableRowNullFormat
                    {
                    pushFollow(FOLLOW_tableRowNullFormat_in_rowFormatDelimited10153);
                    tableRowNullFormat672=tableRowNullFormat();

                    state._fsp--;

                    stream_tableRowNullFormat.add(tableRowNullFormat672.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: tableRowFormatMapKeysIdentifier, tableRowFormatFieldIdentifier, tableRowFormatCollItemsIdentifier, tableRowNullFormat, tableRowFormatLinesIdentifier
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1742:5: -> ^( TOK_SERDEPROPS ( tableRowFormatFieldIdentifier )? ( tableRowFormatCollItemsIdentifier )? ( tableRowFormatMapKeysIdentifier )? ( tableRowFormatLinesIdentifier )? ( tableRowNullFormat )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1742:8: ^( TOK_SERDEPROPS ( tableRowFormatFieldIdentifier )? ( tableRowFormatCollItemsIdentifier )? ( tableRowFormatMapKeysIdentifier )? ( tableRowFormatLinesIdentifier )? ( tableRowNullFormat )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SERDEPROPS, "TOK_SERDEPROPS")
                , root_1);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1742:25: ( tableRowFormatFieldIdentifier )?
                if ( stream_tableRowFormatFieldIdentifier.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableRowFormatFieldIdentifier.nextTree());

                }
                stream_tableRowFormatFieldIdentifier.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1742:56: ( tableRowFormatCollItemsIdentifier )?
                if ( stream_tableRowFormatCollItemsIdentifier.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableRowFormatCollItemsIdentifier.nextTree());

                }
                stream_tableRowFormatCollItemsIdentifier.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1742:91: ( tableRowFormatMapKeysIdentifier )?
                if ( stream_tableRowFormatMapKeysIdentifier.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableRowFormatMapKeysIdentifier.nextTree());

                }
                stream_tableRowFormatMapKeysIdentifier.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1742:124: ( tableRowFormatLinesIdentifier )?
                if ( stream_tableRowFormatLinesIdentifier.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableRowFormatLinesIdentifier.nextTree());

                }
                stream_tableRowFormatLinesIdentifier.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1742:155: ( tableRowNullFormat )?
                if ( stream_tableRowNullFormat.hasNext() ) {
                    adaptor.addChild(root_1, stream_tableRowNullFormat.nextTree());

                }
                stream_tableRowNullFormat.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "rowFormatDelimited"


    public static class tableRowFormat_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tableRowFormat"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1745:1: tableRowFormat : ( rowFormatDelimited -> ^( TOK_TABLEROWFORMAT rowFormatDelimited ) | rowFormatSerde -> ^( TOK_TABLESERIALIZER rowFormatSerde ) );
    public final HiveParser.tableRowFormat_return tableRowFormat() throws RecognitionException {
        HiveParser.tableRowFormat_return retval = new HiveParser.tableRowFormat_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.rowFormatDelimited_return rowFormatDelimited673 =null;

        HiveParser.rowFormatSerde_return rowFormatSerde674 =null;


        RewriteRuleSubtreeStream stream_rowFormatSerde=new RewriteRuleSubtreeStream(adaptor,"rule rowFormatSerde");
        RewriteRuleSubtreeStream stream_rowFormatDelimited=new RewriteRuleSubtreeStream(adaptor,"rule rowFormatDelimited");
         pushMsg("table row format specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1748:5: ( rowFormatDelimited -> ^( TOK_TABLEROWFORMAT rowFormatDelimited ) | rowFormatSerde -> ^( TOK_TABLESERIALIZER rowFormatSerde ) )
            int alt196=2;
            switch ( input.LA(1) ) {
            case KW_ROW:
                {
                switch ( input.LA(2) ) {
                case KW_FORMAT:
                    {
                    switch ( input.LA(3) ) {
                    case KW_DELIMITED:
                        {
                        alt196=1;
                        }
                        break;
                    case KW_SERDE:
                        {
                        alt196=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 196, 2, input);

                        throw nvae;

                    }

                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 196, 1, input);

                    throw nvae;

                }

                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 196, 0, input);

                throw nvae;

            }

            switch (alt196) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1749:7: rowFormatDelimited
                    {
                    pushFollow(FOLLOW_rowFormatDelimited_in_tableRowFormat10212);
                    rowFormatDelimited673=rowFormatDelimited();

                    state._fsp--;

                    stream_rowFormatDelimited.add(rowFormatDelimited673.getTree());

                    // AST REWRITE
                    // elements: rowFormatDelimited
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1750:5: -> ^( TOK_TABLEROWFORMAT rowFormatDelimited )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1750:8: ^( TOK_TABLEROWFORMAT rowFormatDelimited )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_TABLEROWFORMAT, "TOK_TABLEROWFORMAT")
                        , root_1);

                        adaptor.addChild(root_1, stream_rowFormatDelimited.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1751:7: rowFormatSerde
                    {
                    pushFollow(FOLLOW_rowFormatSerde_in_tableRowFormat10232);
                    rowFormatSerde674=rowFormatSerde();

                    state._fsp--;

                    stream_rowFormatSerde.add(rowFormatSerde674.getTree());

                    // AST REWRITE
                    // elements: rowFormatSerde
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1752:5: -> ^( TOK_TABLESERIALIZER rowFormatSerde )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1752:8: ^( TOK_TABLESERIALIZER rowFormatSerde )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_TABLESERIALIZER, "TOK_TABLESERIALIZER")
                        , root_1);

                        adaptor.addChild(root_1, stream_rowFormatSerde.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "tableRowFormat"


    public static class tablePropertiesPrefixed_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tablePropertiesPrefixed"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1755:1: tablePropertiesPrefixed : KW_TBLPROPERTIES ! tableProperties ;
    public final HiveParser.tablePropertiesPrefixed_return tablePropertiesPrefixed() throws RecognitionException {
        HiveParser.tablePropertiesPrefixed_return retval = new HiveParser.tablePropertiesPrefixed_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_TBLPROPERTIES675=null;
        HiveParser.tableProperties_return tableProperties676 =null;


        CommonTree KW_TBLPROPERTIES675_tree=null;

         pushMsg("table properties with prefix", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1758:5: ( KW_TBLPROPERTIES ! tableProperties )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1759:9: KW_TBLPROPERTIES ! tableProperties
            {
            root_0 = (CommonTree)adaptor.nil();


            KW_TBLPROPERTIES675=(Token)match(input,KW_TBLPROPERTIES,FOLLOW_KW_TBLPROPERTIES_in_tablePropertiesPrefixed10279); 

            pushFollow(FOLLOW_tableProperties_in_tablePropertiesPrefixed10282);
            tableProperties676=tableProperties();

            state._fsp--;

            adaptor.addChild(root_0, tableProperties676.getTree());

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "tablePropertiesPrefixed"


    public static class tableProperties_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tableProperties"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1762:1: tableProperties : LPAREN tablePropertiesList RPAREN -> ^( TOK_TABLEPROPERTIES tablePropertiesList ) ;
    public final HiveParser.tableProperties_return tableProperties() throws RecognitionException {
        HiveParser.tableProperties_return retval = new HiveParser.tableProperties_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token LPAREN677=null;
        Token RPAREN679=null;
        HiveParser.tablePropertiesList_return tablePropertiesList678 =null;


        CommonTree LPAREN677_tree=null;
        CommonTree RPAREN679_tree=null;
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleSubtreeStream stream_tablePropertiesList=new RewriteRuleSubtreeStream(adaptor,"rule tablePropertiesList");
         pushMsg("table properties", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1765:5: ( LPAREN tablePropertiesList RPAREN -> ^( TOK_TABLEPROPERTIES tablePropertiesList ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1766:7: LPAREN tablePropertiesList RPAREN
            {
            LPAREN677=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_tableProperties10315);  
            stream_LPAREN.add(LPAREN677);


            pushFollow(FOLLOW_tablePropertiesList_in_tableProperties10317);
            tablePropertiesList678=tablePropertiesList();

            state._fsp--;

            stream_tablePropertiesList.add(tablePropertiesList678.getTree());

            RPAREN679=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_tableProperties10319);  
            stream_RPAREN.add(RPAREN679);


            // AST REWRITE
            // elements: tablePropertiesList
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1766:41: -> ^( TOK_TABLEPROPERTIES tablePropertiesList )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1766:44: ^( TOK_TABLEPROPERTIES tablePropertiesList )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABLEPROPERTIES, "TOK_TABLEPROPERTIES")
                , root_1);

                adaptor.addChild(root_1, stream_tablePropertiesList.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "tableProperties"


    public static class tablePropertiesList_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tablePropertiesList"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1769:1: tablePropertiesList : ( keyValueProperty ( COMMA keyValueProperty )* -> ^( TOK_TABLEPROPLIST ( keyValueProperty )+ ) | keyProperty ( COMMA keyProperty )* -> ^( TOK_TABLEPROPLIST ( keyProperty )+ ) );
    public final HiveParser.tablePropertiesList_return tablePropertiesList() throws RecognitionException {
        HiveParser.tablePropertiesList_return retval = new HiveParser.tablePropertiesList_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token COMMA681=null;
        Token COMMA684=null;
        HiveParser.keyValueProperty_return keyValueProperty680 =null;

        HiveParser.keyValueProperty_return keyValueProperty682 =null;

        HiveParser.keyProperty_return keyProperty683 =null;

        HiveParser.keyProperty_return keyProperty685 =null;


        CommonTree COMMA681_tree=null;
        CommonTree COMMA684_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleSubtreeStream stream_keyValueProperty=new RewriteRuleSubtreeStream(adaptor,"rule keyValueProperty");
        RewriteRuleSubtreeStream stream_keyProperty=new RewriteRuleSubtreeStream(adaptor,"rule keyProperty");
         pushMsg("table properties list", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1772:5: ( keyValueProperty ( COMMA keyValueProperty )* -> ^( TOK_TABLEPROPLIST ( keyValueProperty )+ ) | keyProperty ( COMMA keyProperty )* -> ^( TOK_TABLEPROPLIST ( keyProperty )+ ) )
            int alt199=2;
            switch ( input.LA(1) ) {
            case StringLiteral:
                {
                switch ( input.LA(2) ) {
                case EQUAL:
                    {
                    alt199=1;
                    }
                    break;
                case COMMA:
                case RPAREN:
                    {
                    alt199=2;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 199, 1, input);

                    throw nvae;

                }

                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 199, 0, input);

                throw nvae;

            }

            switch (alt199) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1773:7: keyValueProperty ( COMMA keyValueProperty )*
                    {
                    pushFollow(FOLLOW_keyValueProperty_in_tablePropertiesList10360);
                    keyValueProperty680=keyValueProperty();

                    state._fsp--;

                    stream_keyValueProperty.add(keyValueProperty680.getTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1773:24: ( COMMA keyValueProperty )*
                    loop197:
                    do {
                        int alt197=2;
                        switch ( input.LA(1) ) {
                        case COMMA:
                            {
                            alt197=1;
                            }
                            break;

                        }

                        switch (alt197) {
                    	case 1 :
                    	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1773:25: COMMA keyValueProperty
                    	    {
                    	    COMMA681=(Token)match(input,COMMA,FOLLOW_COMMA_in_tablePropertiesList10363);  
                    	    stream_COMMA.add(COMMA681);


                    	    pushFollow(FOLLOW_keyValueProperty_in_tablePropertiesList10365);
                    	    keyValueProperty682=keyValueProperty();

                    	    state._fsp--;

                    	    stream_keyValueProperty.add(keyValueProperty682.getTree());

                    	    }
                    	    break;

                    	default :
                    	    break loop197;
                        }
                    } while (true);


                    // AST REWRITE
                    // elements: keyValueProperty
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1773:50: -> ^( TOK_TABLEPROPLIST ( keyValueProperty )+ )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1773:53: ^( TOK_TABLEPROPLIST ( keyValueProperty )+ )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_TABLEPROPLIST, "TOK_TABLEPROPLIST")
                        , root_1);

                        if ( !(stream_keyValueProperty.hasNext()) ) {
                            throw new RewriteEarlyExitException();
                        }
                        while ( stream_keyValueProperty.hasNext() ) {
                            adaptor.addChild(root_1, stream_keyValueProperty.nextTree());

                        }
                        stream_keyValueProperty.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1775:7: keyProperty ( COMMA keyProperty )*
                    {
                    pushFollow(FOLLOW_keyProperty_in_tablePropertiesList10390);
                    keyProperty683=keyProperty();

                    state._fsp--;

                    stream_keyProperty.add(keyProperty683.getTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1775:19: ( COMMA keyProperty )*
                    loop198:
                    do {
                        int alt198=2;
                        switch ( input.LA(1) ) {
                        case COMMA:
                            {
                            alt198=1;
                            }
                            break;

                        }

                        switch (alt198) {
                    	case 1 :
                    	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1775:20: COMMA keyProperty
                    	    {
                    	    COMMA684=(Token)match(input,COMMA,FOLLOW_COMMA_in_tablePropertiesList10393);  
                    	    stream_COMMA.add(COMMA684);


                    	    pushFollow(FOLLOW_keyProperty_in_tablePropertiesList10395);
                    	    keyProperty685=keyProperty();

                    	    state._fsp--;

                    	    stream_keyProperty.add(keyProperty685.getTree());

                    	    }
                    	    break;

                    	default :
                    	    break loop198;
                        }
                    } while (true);


                    // AST REWRITE
                    // elements: keyProperty
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1775:40: -> ^( TOK_TABLEPROPLIST ( keyProperty )+ )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1775:43: ^( TOK_TABLEPROPLIST ( keyProperty )+ )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_TABLEPROPLIST, "TOK_TABLEPROPLIST")
                        , root_1);

                        if ( !(stream_keyProperty.hasNext()) ) {
                            throw new RewriteEarlyExitException();
                        }
                        while ( stream_keyProperty.hasNext() ) {
                            adaptor.addChild(root_1, stream_keyProperty.nextTree());

                        }
                        stream_keyProperty.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "tablePropertiesList"


    public static class keyValueProperty_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "keyValueProperty"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1778:1: keyValueProperty : key= StringLiteral EQUAL value= StringLiteral -> ^( TOK_TABLEPROPERTY $key $value) ;
    public final HiveParser.keyValueProperty_return keyValueProperty() throws RecognitionException {
        HiveParser.keyValueProperty_return retval = new HiveParser.keyValueProperty_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token key=null;
        Token value=null;
        Token EQUAL686=null;

        CommonTree key_tree=null;
        CommonTree value_tree=null;
        CommonTree EQUAL686_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_EQUAL=new RewriteRuleTokenStream(adaptor,"token EQUAL");

         pushMsg("specifying key/value property", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1781:5: (key= StringLiteral EQUAL value= StringLiteral -> ^( TOK_TABLEPROPERTY $key $value) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1782:7: key= StringLiteral EQUAL value= StringLiteral
            {
            key=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_keyValueProperty10441);  
            stream_StringLiteral.add(key);


            EQUAL686=(Token)match(input,EQUAL,FOLLOW_EQUAL_in_keyValueProperty10443);  
            stream_EQUAL.add(EQUAL686);


            value=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_keyValueProperty10447);  
            stream_StringLiteral.add(value);


            // AST REWRITE
            // elements: value, key
            // token labels: value, key
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_value=new RewriteRuleTokenStream(adaptor,"token value",value);
            RewriteRuleTokenStream stream_key=new RewriteRuleTokenStream(adaptor,"token key",key);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1782:51: -> ^( TOK_TABLEPROPERTY $key $value)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1782:54: ^( TOK_TABLEPROPERTY $key $value)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
                , root_1);

                adaptor.addChild(root_1, stream_key.nextNode());

                adaptor.addChild(root_1, stream_value.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "keyValueProperty"


    public static class keyProperty_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "keyProperty"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1785:1: keyProperty : key= StringLiteral -> ^( TOK_TABLEPROPERTY $key TOK_NULL ) ;
    public final HiveParser.keyProperty_return keyProperty() throws RecognitionException {
        HiveParser.keyProperty_return retval = new HiveParser.keyProperty_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token key=null;

        CommonTree key_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");

         pushMsg("specifying key property", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1788:5: (key= StringLiteral -> ^( TOK_TABLEPROPERTY $key TOK_NULL ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1789:7: key= StringLiteral
            {
            key=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_keyProperty10494);  
            stream_StringLiteral.add(key);


            // AST REWRITE
            // elements: key
            // token labels: key
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_key=new RewriteRuleTokenStream(adaptor,"token key",key);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1789:25: -> ^( TOK_TABLEPROPERTY $key TOK_NULL )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1789:28: ^( TOK_TABLEPROPERTY $key TOK_NULL )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
                , root_1);

                adaptor.addChild(root_1, stream_key.nextNode());

                adaptor.addChild(root_1, 
                (CommonTree)adaptor.create(TOK_NULL, "TOK_NULL")
                );

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "keyProperty"


    public static class tableRowFormatFieldIdentifier_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tableRowFormatFieldIdentifier"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1792:1: tableRowFormatFieldIdentifier : KW_FIELDS KW_TERMINATED KW_BY fldIdnt= StringLiteral ( KW_ESCAPED KW_BY fldEscape= StringLiteral )? -> ^( TOK_TABLEROWFORMATFIELD $fldIdnt ( $fldEscape)? ) ;
    public final HiveParser.tableRowFormatFieldIdentifier_return tableRowFormatFieldIdentifier() throws RecognitionException {
        HiveParser.tableRowFormatFieldIdentifier_return retval = new HiveParser.tableRowFormatFieldIdentifier_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token fldIdnt=null;
        Token fldEscape=null;
        Token KW_FIELDS687=null;
        Token KW_TERMINATED688=null;
        Token KW_BY689=null;
        Token KW_ESCAPED690=null;
        Token KW_BY691=null;

        CommonTree fldIdnt_tree=null;
        CommonTree fldEscape_tree=null;
        CommonTree KW_FIELDS687_tree=null;
        CommonTree KW_TERMINATED688_tree=null;
        CommonTree KW_BY689_tree=null;
        CommonTree KW_ESCAPED690_tree=null;
        CommonTree KW_BY691_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_ESCAPED=new RewriteRuleTokenStream(adaptor,"token KW_ESCAPED");
        RewriteRuleTokenStream stream_KW_FIELDS=new RewriteRuleTokenStream(adaptor,"token KW_FIELDS");
        RewriteRuleTokenStream stream_KW_TERMINATED=new RewriteRuleTokenStream(adaptor,"token KW_TERMINATED");
        RewriteRuleTokenStream stream_KW_BY=new RewriteRuleTokenStream(adaptor,"token KW_BY");

         pushMsg("table row format's field separator", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1795:5: ( KW_FIELDS KW_TERMINATED KW_BY fldIdnt= StringLiteral ( KW_ESCAPED KW_BY fldEscape= StringLiteral )? -> ^( TOK_TABLEROWFORMATFIELD $fldIdnt ( $fldEscape)? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1796:7: KW_FIELDS KW_TERMINATED KW_BY fldIdnt= StringLiteral ( KW_ESCAPED KW_BY fldEscape= StringLiteral )?
            {
            KW_FIELDS687=(Token)match(input,KW_FIELDS,FOLLOW_KW_FIELDS_in_tableRowFormatFieldIdentifier10538);  
            stream_KW_FIELDS.add(KW_FIELDS687);


            KW_TERMINATED688=(Token)match(input,KW_TERMINATED,FOLLOW_KW_TERMINATED_in_tableRowFormatFieldIdentifier10540);  
            stream_KW_TERMINATED.add(KW_TERMINATED688);


            KW_BY689=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableRowFormatFieldIdentifier10542);  
            stream_KW_BY.add(KW_BY689);


            fldIdnt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableRowFormatFieldIdentifier10546);  
            stream_StringLiteral.add(fldIdnt);


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1796:59: ( KW_ESCAPED KW_BY fldEscape= StringLiteral )?
            int alt200=2;
            switch ( input.LA(1) ) {
                case KW_ESCAPED:
                    {
                    alt200=1;
                    }
                    break;
            }

            switch (alt200) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1796:60: KW_ESCAPED KW_BY fldEscape= StringLiteral
                    {
                    KW_ESCAPED690=(Token)match(input,KW_ESCAPED,FOLLOW_KW_ESCAPED_in_tableRowFormatFieldIdentifier10549);  
                    stream_KW_ESCAPED.add(KW_ESCAPED690);


                    KW_BY691=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableRowFormatFieldIdentifier10551);  
                    stream_KW_BY.add(KW_BY691);


                    fldEscape=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableRowFormatFieldIdentifier10555);  
                    stream_StringLiteral.add(fldEscape);


                    }
                    break;

            }


            // AST REWRITE
            // elements: fldEscape, fldIdnt
            // token labels: fldEscape, fldIdnt
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_fldEscape=new RewriteRuleTokenStream(adaptor,"token fldEscape",fldEscape);
            RewriteRuleTokenStream stream_fldIdnt=new RewriteRuleTokenStream(adaptor,"token fldIdnt",fldIdnt);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1797:5: -> ^( TOK_TABLEROWFORMATFIELD $fldIdnt ( $fldEscape)? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1797:8: ^( TOK_TABLEROWFORMATFIELD $fldIdnt ( $fldEscape)? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABLEROWFORMATFIELD, "TOK_TABLEROWFORMATFIELD")
                , root_1);

                adaptor.addChild(root_1, stream_fldIdnt.nextNode());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1797:44: ( $fldEscape)?
                if ( stream_fldEscape.hasNext() ) {
                    adaptor.addChild(root_1, stream_fldEscape.nextNode());

                }
                stream_fldEscape.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "tableRowFormatFieldIdentifier"


    public static class tableRowFormatCollItemsIdentifier_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tableRowFormatCollItemsIdentifier"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1800:1: tableRowFormatCollItemsIdentifier : KW_COLLECTION KW_ITEMS KW_TERMINATED KW_BY collIdnt= StringLiteral -> ^( TOK_TABLEROWFORMATCOLLITEMS $collIdnt) ;
    public final HiveParser.tableRowFormatCollItemsIdentifier_return tableRowFormatCollItemsIdentifier() throws RecognitionException {
        HiveParser.tableRowFormatCollItemsIdentifier_return retval = new HiveParser.tableRowFormatCollItemsIdentifier_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token collIdnt=null;
        Token KW_COLLECTION692=null;
        Token KW_ITEMS693=null;
        Token KW_TERMINATED694=null;
        Token KW_BY695=null;

        CommonTree collIdnt_tree=null;
        CommonTree KW_COLLECTION692_tree=null;
        CommonTree KW_ITEMS693_tree=null;
        CommonTree KW_TERMINATED694_tree=null;
        CommonTree KW_BY695_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_ITEMS=new RewriteRuleTokenStream(adaptor,"token KW_ITEMS");
        RewriteRuleTokenStream stream_KW_COLLECTION=new RewriteRuleTokenStream(adaptor,"token KW_COLLECTION");
        RewriteRuleTokenStream stream_KW_TERMINATED=new RewriteRuleTokenStream(adaptor,"token KW_TERMINATED");
        RewriteRuleTokenStream stream_KW_BY=new RewriteRuleTokenStream(adaptor,"token KW_BY");

         pushMsg("table row format's column separator", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1803:5: ( KW_COLLECTION KW_ITEMS KW_TERMINATED KW_BY collIdnt= StringLiteral -> ^( TOK_TABLEROWFORMATCOLLITEMS $collIdnt) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1804:7: KW_COLLECTION KW_ITEMS KW_TERMINATED KW_BY collIdnt= StringLiteral
            {
            KW_COLLECTION692=(Token)match(input,KW_COLLECTION,FOLLOW_KW_COLLECTION_in_tableRowFormatCollItemsIdentifier10607);  
            stream_KW_COLLECTION.add(KW_COLLECTION692);


            KW_ITEMS693=(Token)match(input,KW_ITEMS,FOLLOW_KW_ITEMS_in_tableRowFormatCollItemsIdentifier10609);  
            stream_KW_ITEMS.add(KW_ITEMS693);


            KW_TERMINATED694=(Token)match(input,KW_TERMINATED,FOLLOW_KW_TERMINATED_in_tableRowFormatCollItemsIdentifier10611);  
            stream_KW_TERMINATED.add(KW_TERMINATED694);


            KW_BY695=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableRowFormatCollItemsIdentifier10613);  
            stream_KW_BY.add(KW_BY695);


            collIdnt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableRowFormatCollItemsIdentifier10617);  
            stream_StringLiteral.add(collIdnt);


            // AST REWRITE
            // elements: collIdnt
            // token labels: collIdnt
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_collIdnt=new RewriteRuleTokenStream(adaptor,"token collIdnt",collIdnt);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1805:5: -> ^( TOK_TABLEROWFORMATCOLLITEMS $collIdnt)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1805:8: ^( TOK_TABLEROWFORMATCOLLITEMS $collIdnt)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABLEROWFORMATCOLLITEMS, "TOK_TABLEROWFORMATCOLLITEMS")
                , root_1);

                adaptor.addChild(root_1, stream_collIdnt.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "tableRowFormatCollItemsIdentifier"


    public static class tableRowFormatMapKeysIdentifier_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tableRowFormatMapKeysIdentifier"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1808:1: tableRowFormatMapKeysIdentifier : KW_MAP KW_KEYS KW_TERMINATED KW_BY mapKeysIdnt= StringLiteral -> ^( TOK_TABLEROWFORMATMAPKEYS $mapKeysIdnt) ;
    public final HiveParser.tableRowFormatMapKeysIdentifier_return tableRowFormatMapKeysIdentifier() throws RecognitionException {
        HiveParser.tableRowFormatMapKeysIdentifier_return retval = new HiveParser.tableRowFormatMapKeysIdentifier_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token mapKeysIdnt=null;
        Token KW_MAP696=null;
        Token KW_KEYS697=null;
        Token KW_TERMINATED698=null;
        Token KW_BY699=null;

        CommonTree mapKeysIdnt_tree=null;
        CommonTree KW_MAP696_tree=null;
        CommonTree KW_KEYS697_tree=null;
        CommonTree KW_TERMINATED698_tree=null;
        CommonTree KW_BY699_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_KEYS=new RewriteRuleTokenStream(adaptor,"token KW_KEYS");
        RewriteRuleTokenStream stream_KW_MAP=new RewriteRuleTokenStream(adaptor,"token KW_MAP");
        RewriteRuleTokenStream stream_KW_TERMINATED=new RewriteRuleTokenStream(adaptor,"token KW_TERMINATED");
        RewriteRuleTokenStream stream_KW_BY=new RewriteRuleTokenStream(adaptor,"token KW_BY");

         pushMsg("table row format's map key separator", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1811:5: ( KW_MAP KW_KEYS KW_TERMINATED KW_BY mapKeysIdnt= StringLiteral -> ^( TOK_TABLEROWFORMATMAPKEYS $mapKeysIdnt) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1812:7: KW_MAP KW_KEYS KW_TERMINATED KW_BY mapKeysIdnt= StringLiteral
            {
            KW_MAP696=(Token)match(input,KW_MAP,FOLLOW_KW_MAP_in_tableRowFormatMapKeysIdentifier10663);  
            stream_KW_MAP.add(KW_MAP696);


            KW_KEYS697=(Token)match(input,KW_KEYS,FOLLOW_KW_KEYS_in_tableRowFormatMapKeysIdentifier10665);  
            stream_KW_KEYS.add(KW_KEYS697);


            KW_TERMINATED698=(Token)match(input,KW_TERMINATED,FOLLOW_KW_TERMINATED_in_tableRowFormatMapKeysIdentifier10667);  
            stream_KW_TERMINATED.add(KW_TERMINATED698);


            KW_BY699=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableRowFormatMapKeysIdentifier10669);  
            stream_KW_BY.add(KW_BY699);


            mapKeysIdnt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableRowFormatMapKeysIdentifier10673);  
            stream_StringLiteral.add(mapKeysIdnt);


            // AST REWRITE
            // elements: mapKeysIdnt
            // token labels: mapKeysIdnt
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_mapKeysIdnt=new RewriteRuleTokenStream(adaptor,"token mapKeysIdnt",mapKeysIdnt);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1813:5: -> ^( TOK_TABLEROWFORMATMAPKEYS $mapKeysIdnt)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1813:8: ^( TOK_TABLEROWFORMATMAPKEYS $mapKeysIdnt)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABLEROWFORMATMAPKEYS, "TOK_TABLEROWFORMATMAPKEYS")
                , root_1);

                adaptor.addChild(root_1, stream_mapKeysIdnt.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "tableRowFormatMapKeysIdentifier"


    public static class tableRowFormatLinesIdentifier_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tableRowFormatLinesIdentifier"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1816:1: tableRowFormatLinesIdentifier : KW_LINES KW_TERMINATED KW_BY linesIdnt= StringLiteral -> ^( TOK_TABLEROWFORMATLINES $linesIdnt) ;
    public final HiveParser.tableRowFormatLinesIdentifier_return tableRowFormatLinesIdentifier() throws RecognitionException {
        HiveParser.tableRowFormatLinesIdentifier_return retval = new HiveParser.tableRowFormatLinesIdentifier_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token linesIdnt=null;
        Token KW_LINES700=null;
        Token KW_TERMINATED701=null;
        Token KW_BY702=null;

        CommonTree linesIdnt_tree=null;
        CommonTree KW_LINES700_tree=null;
        CommonTree KW_TERMINATED701_tree=null;
        CommonTree KW_BY702_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_LINES=new RewriteRuleTokenStream(adaptor,"token KW_LINES");
        RewriteRuleTokenStream stream_KW_TERMINATED=new RewriteRuleTokenStream(adaptor,"token KW_TERMINATED");
        RewriteRuleTokenStream stream_KW_BY=new RewriteRuleTokenStream(adaptor,"token KW_BY");

         pushMsg("table row format's line separator", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1819:5: ( KW_LINES KW_TERMINATED KW_BY linesIdnt= StringLiteral -> ^( TOK_TABLEROWFORMATLINES $linesIdnt) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1820:7: KW_LINES KW_TERMINATED KW_BY linesIdnt= StringLiteral
            {
            KW_LINES700=(Token)match(input,KW_LINES,FOLLOW_KW_LINES_in_tableRowFormatLinesIdentifier10719);  
            stream_KW_LINES.add(KW_LINES700);


            KW_TERMINATED701=(Token)match(input,KW_TERMINATED,FOLLOW_KW_TERMINATED_in_tableRowFormatLinesIdentifier10721);  
            stream_KW_TERMINATED.add(KW_TERMINATED701);


            KW_BY702=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableRowFormatLinesIdentifier10723);  
            stream_KW_BY.add(KW_BY702);


            linesIdnt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableRowFormatLinesIdentifier10727);  
            stream_StringLiteral.add(linesIdnt);


            // AST REWRITE
            // elements: linesIdnt
            // token labels: linesIdnt
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_linesIdnt=new RewriteRuleTokenStream(adaptor,"token linesIdnt",linesIdnt);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1821:5: -> ^( TOK_TABLEROWFORMATLINES $linesIdnt)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1821:8: ^( TOK_TABLEROWFORMATLINES $linesIdnt)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABLEROWFORMATLINES, "TOK_TABLEROWFORMATLINES")
                , root_1);

                adaptor.addChild(root_1, stream_linesIdnt.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "tableRowFormatLinesIdentifier"


    public static class tableRowNullFormat_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tableRowNullFormat"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1824:1: tableRowNullFormat : KW_NULL KW_DEFINED KW_AS nullIdnt= StringLiteral -> ^( TOK_TABLEROWFORMATNULL $nullIdnt) ;
    public final HiveParser.tableRowNullFormat_return tableRowNullFormat() throws RecognitionException {
        HiveParser.tableRowNullFormat_return retval = new HiveParser.tableRowNullFormat_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token nullIdnt=null;
        Token KW_NULL703=null;
        Token KW_DEFINED704=null;
        Token KW_AS705=null;

        CommonTree nullIdnt_tree=null;
        CommonTree KW_NULL703_tree=null;
        CommonTree KW_DEFINED704_tree=null;
        CommonTree KW_AS705_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
        RewriteRuleTokenStream stream_KW_NULL=new RewriteRuleTokenStream(adaptor,"token KW_NULL");
        RewriteRuleTokenStream stream_KW_DEFINED=new RewriteRuleTokenStream(adaptor,"token KW_DEFINED");

         pushMsg("table row format's null specifier", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1827:5: ( KW_NULL KW_DEFINED KW_AS nullIdnt= StringLiteral -> ^( TOK_TABLEROWFORMATNULL $nullIdnt) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1828:7: KW_NULL KW_DEFINED KW_AS nullIdnt= StringLiteral
            {
            KW_NULL703=(Token)match(input,KW_NULL,FOLLOW_KW_NULL_in_tableRowNullFormat10773);  
            stream_KW_NULL.add(KW_NULL703);


            KW_DEFINED704=(Token)match(input,KW_DEFINED,FOLLOW_KW_DEFINED_in_tableRowNullFormat10775);  
            stream_KW_DEFINED.add(KW_DEFINED704);


            KW_AS705=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_tableRowNullFormat10777);  
            stream_KW_AS.add(KW_AS705);


            nullIdnt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableRowNullFormat10781);  
            stream_StringLiteral.add(nullIdnt);


            // AST REWRITE
            // elements: nullIdnt
            // token labels: nullIdnt
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_nullIdnt=new RewriteRuleTokenStream(adaptor,"token nullIdnt",nullIdnt);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1829:5: -> ^( TOK_TABLEROWFORMATNULL $nullIdnt)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1829:8: ^( TOK_TABLEROWFORMATNULL $nullIdnt)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABLEROWFORMATNULL, "TOK_TABLEROWFORMATNULL")
                , root_1);

                adaptor.addChild(root_1, stream_nullIdnt.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "tableRowNullFormat"


    public static class tableFileFormat_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tableFileFormat"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1831:1: tableFileFormat : ( KW_STORED KW_AS KW_INPUTFORMAT inFmt= StringLiteral KW_OUTPUTFORMAT outFmt= StringLiteral ( KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral )? -> ^( TOK_TABLEFILEFORMAT $inFmt $outFmt ( $inDriver)? ( $outDriver)? ) | KW_STORED KW_BY storageHandler= StringLiteral ( KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties )? -> ^( TOK_STORAGEHANDLER $storageHandler ( $serdeprops)? ) | KW_STORED KW_AS genericSpec= identifier -> ^( TOK_FILEFORMAT_GENERIC $genericSpec) );
    public final HiveParser.tableFileFormat_return tableFileFormat() throws RecognitionException {
        HiveParser.tableFileFormat_return retval = new HiveParser.tableFileFormat_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token inFmt=null;
        Token outFmt=null;
        Token inDriver=null;
        Token outDriver=null;
        Token storageHandler=null;
        Token KW_STORED706=null;
        Token KW_AS707=null;
        Token KW_INPUTFORMAT708=null;
        Token KW_OUTPUTFORMAT709=null;
        Token KW_INPUTDRIVER710=null;
        Token KW_OUTPUTDRIVER711=null;
        Token KW_STORED712=null;
        Token KW_BY713=null;
        Token KW_WITH714=null;
        Token KW_SERDEPROPERTIES715=null;
        Token KW_STORED716=null;
        Token KW_AS717=null;
        HiveParser.tableProperties_return serdeprops =null;

        HiveParser_IdentifiersParser.identifier_return genericSpec =null;


        CommonTree inFmt_tree=null;
        CommonTree outFmt_tree=null;
        CommonTree inDriver_tree=null;
        CommonTree outDriver_tree=null;
        CommonTree storageHandler_tree=null;
        CommonTree KW_STORED706_tree=null;
        CommonTree KW_AS707_tree=null;
        CommonTree KW_INPUTFORMAT708_tree=null;
        CommonTree KW_OUTPUTFORMAT709_tree=null;
        CommonTree KW_INPUTDRIVER710_tree=null;
        CommonTree KW_OUTPUTDRIVER711_tree=null;
        CommonTree KW_STORED712_tree=null;
        CommonTree KW_BY713_tree=null;
        CommonTree KW_WITH714_tree=null;
        CommonTree KW_SERDEPROPERTIES715_tree=null;
        CommonTree KW_STORED716_tree=null;
        CommonTree KW_AS717_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
        RewriteRuleTokenStream stream_KW_INPUTDRIVER=new RewriteRuleTokenStream(adaptor,"token KW_INPUTDRIVER");
        RewriteRuleTokenStream stream_KW_INPUTFORMAT=new RewriteRuleTokenStream(adaptor,"token KW_INPUTFORMAT");
        RewriteRuleTokenStream stream_KW_OUTPUTFORMAT=new RewriteRuleTokenStream(adaptor,"token KW_OUTPUTFORMAT");
        RewriteRuleTokenStream stream_KW_STORED=new RewriteRuleTokenStream(adaptor,"token KW_STORED");
        RewriteRuleTokenStream stream_KW_OUTPUTDRIVER=new RewriteRuleTokenStream(adaptor,"token KW_OUTPUTDRIVER");
        RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
        RewriteRuleTokenStream stream_KW_BY=new RewriteRuleTokenStream(adaptor,"token KW_BY");
        RewriteRuleTokenStream stream_KW_SERDEPROPERTIES=new RewriteRuleTokenStream(adaptor,"token KW_SERDEPROPERTIES");
        RewriteRuleSubtreeStream stream_tableProperties=new RewriteRuleSubtreeStream(adaptor,"rule tableProperties");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("table file format specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1834:5: ( KW_STORED KW_AS KW_INPUTFORMAT inFmt= StringLiteral KW_OUTPUTFORMAT outFmt= StringLiteral ( KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral )? -> ^( TOK_TABLEFILEFORMAT $inFmt $outFmt ( $inDriver)? ( $outDriver)? ) | KW_STORED KW_BY storageHandler= StringLiteral ( KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties )? -> ^( TOK_STORAGEHANDLER $storageHandler ( $serdeprops)? ) | KW_STORED KW_AS genericSpec= identifier -> ^( TOK_FILEFORMAT_GENERIC $genericSpec) )
            int alt203=3;
            switch ( input.LA(1) ) {
            case KW_STORED:
                {
                switch ( input.LA(2) ) {
                case KW_AS:
                    {
                    switch ( input.LA(3) ) {
                    case KW_INPUTFORMAT:
                        {
                        alt203=1;
                        }
                        break;
                    case Identifier:
                    case KW_ADD:
                    case KW_ADMIN:
                    case KW_AFTER:
                    case KW_ALL:
                    case KW_ALTER:
                    case KW_ANALYZE:
                    case KW_ARCHIVE:
                    case KW_ARRAY:
                    case KW_AS:
                    case KW_ASC:
                    case KW_AUTHORIZATION:
                    case KW_BEFORE:
                    case KW_BETWEEN:
                    case KW_BIGINT:
                    case KW_BINARY:
                    case KW_BOOLEAN:
                    case KW_BOTH:
                    case KW_BUCKET:
                    case KW_BUCKETS:
                    case KW_BY:
                    case KW_CASCADE:
                    case KW_CHANGE:
                    case KW_CLUSTER:
                    case KW_CLUSTERED:
                    case KW_CLUSTERSTATUS:
                    case KW_COLLECTION:
                    case KW_COLUMNS:
                    case KW_COMMENT:
                    case KW_COMPACT:
                    case KW_COMPACTIONS:
                    case KW_COMPUTE:
                    case KW_CONCATENATE:
                    case KW_CONTINUE:
                    case KW_CREATE:
                    case KW_CUBE:
                    case KW_CURSOR:
                    case KW_DATA:
                    case KW_DATABASES:
                    case KW_DATE:
                    case KW_DATETIME:
                    case KW_DBPROPERTIES:
                    case KW_DECIMAL:
                    case KW_DEFAULT:
                    case KW_DEFERRED:
                    case KW_DEFINED:
                    case KW_DELETE:
                    case KW_DELIMITED:
                    case KW_DEPENDENCY:
                    case KW_DESC:
                    case KW_DESCRIBE:
                    case KW_DIRECTORIES:
                    case KW_DIRECTORY:
                    case KW_DISABLE:
                    case KW_DISTRIBUTE:
                    case KW_DOUBLE:
                    case KW_DROP:
                    case KW_ELEM_TYPE:
                    case KW_ENABLE:
                    case KW_ESCAPED:
                    case KW_EXCLUSIVE:
                    case KW_EXISTS:
                    case KW_EXPLAIN:
                    case KW_EXPORT:
                    case KW_EXTERNAL:
                    case KW_FALSE:
                    case KW_FETCH:
                    case KW_FIELDS:
                    case KW_FILE:
                    case KW_FILEFORMAT:
                    case KW_FIRST:
                    case KW_FLOAT:
                    case KW_FOR:
                    case KW_FORMAT:
                    case KW_FORMATTED:
                    case KW_FULL:
                    case KW_FUNCTIONS:
                    case KW_GRANT:
                    case KW_GROUP:
                    case KW_GROUPING:
                    case KW_HOLD_DDLTIME:
                    case KW_IDXPROPERTIES:
                    case KW_IGNORE:
                    case KW_IMPORT:
                    case KW_IN:
                    case KW_INDEX:
                    case KW_INDEXES:
                    case KW_INNER:
                    case KW_INPATH:
                    case KW_INPUTDRIVER:
                    case KW_INSERT:
                    case KW_INT:
                    case KW_INTERSECT:
                    case KW_INTO:
                    case KW_IS:
                    case KW_ITEMS:
                    case KW_JAR:
                    case KW_KEYS:
                    case KW_KEY_TYPE:
                    case KW_LATERAL:
                    case KW_LEFT:
                    case KW_LIKE:
                    case KW_LIMIT:
                    case KW_LINES:
                    case KW_LOAD:
                    case KW_LOCAL:
                    case KW_LOCATION:
                    case KW_LOCK:
                    case KW_LOCKS:
                    case KW_LOGICAL:
                    case KW_LONG:
                    case KW_MAPJOIN:
                    case KW_MATERIALIZED:
                    case KW_MINUS:
                    case KW_MSCK:
                    case KW_NONE:
                    case KW_NOSCAN:
                    case KW_NO_DROP:
                    case KW_NULL:
                    case KW_OF:
                    case KW_OFFLINE:
                    case KW_OPTION:
                    case KW_ORDER:
                    case KW_OUT:
                    case KW_OUTER:
                    case KW_OUTPUTDRIVER:
                    case KW_OUTPUTFORMAT:
                    case KW_OVERWRITE:
                    case KW_OWNER:
                    case KW_PARTITION:
                    case KW_PARTITIONED:
                    case KW_PARTITIONS:
                    case KW_PERCENT:
                    case KW_PLUS:
                    case KW_PRETTY:
                    case KW_PRINCIPALS:
                    case KW_PROCEDURE:
                    case KW_PROTECTION:
                    case KW_PURGE:
                    case KW_RANGE:
                    case KW_READ:
                    case KW_READONLY:
                    case KW_READS:
                    case KW_REBUILD:
                    case KW_RECORDREADER:
                    case KW_RECORDWRITER:
                    case KW_REGEXP:
                    case KW_RENAME:
                    case KW_REPAIR:
                    case KW_REPLACE:
                    case KW_RESTRICT:
                    case KW_REVOKE:
                    case KW_REWRITE:
                    case KW_RIGHT:
                    case KW_RLIKE:
                    case KW_ROLE:
                    case KW_ROLES:
                    case KW_ROLLUP:
                    case KW_ROW:
                    case KW_ROWS:
                    case KW_SCHEMA:
                    case KW_SCHEMAS:
                    case KW_SEMI:
                    case KW_SERDE:
                    case KW_SERDEPROPERTIES:
                    case KW_SET:
                    case KW_SETS:
                    case KW_SHARED:
                    case KW_SHOW:
                    case KW_SHOW_DATABASE:
                    case KW_SKEWED:
                    case KW_SMALLINT:
                    case KW_SORT:
                    case KW_SORTED:
                    case KW_SSL:
                    case KW_STATISTICS:
                    case KW_STORED:
                    case KW_STREAMTABLE:
                    case KW_STRING:
                    case KW_STRUCT:
                    case KW_TABLE:
                    case KW_TABLES:
                    case KW_TBLPROPERTIES:
                    case KW_TEMPORARY:
                    case KW_TERMINATED:
                    case KW_TIMESTAMP:
                    case KW_TINYINT:
                    case KW_TO:
                    case KW_TOUCH:
                    case KW_TRANSACTIONS:
                    case KW_TRIGGER:
                    case KW_TRUE:
                    case KW_TRUNCATE:
                    case KW_UNARCHIVE:
                    case KW_UNDO:
                    case KW_UNION:
                    case KW_UNIONTYPE:
                    case KW_UNLOCK:
                    case KW_UNSET:
                    case KW_UNSIGNED:
                    case KW_UPDATE:
                    case KW_USE:
                    case KW_USER:
                    case KW_USING:
                    case KW_UTC:
                    case KW_UTCTIMESTAMP:
                    case KW_VALUES:
                    case KW_VALUE_TYPE:
                    case KW_VIEW:
                    case KW_WHILE:
                    case KW_WITH:
                        {
                        alt203=3;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 203, 2, input);

                        throw nvae;

                    }

                    }
                    break;
                case KW_BY:
                    {
                    alt203=2;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 203, 1, input);

                    throw nvae;

                }

                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 203, 0, input);

                throw nvae;

            }

            switch (alt203) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1835:7: KW_STORED KW_AS KW_INPUTFORMAT inFmt= StringLiteral KW_OUTPUTFORMAT outFmt= StringLiteral ( KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral )?
                    {
                    KW_STORED706=(Token)match(input,KW_STORED,FOLLOW_KW_STORED_in_tableFileFormat10826);  
                    stream_KW_STORED.add(KW_STORED706);


                    KW_AS707=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_tableFileFormat10828);  
                    stream_KW_AS.add(KW_AS707);


                    KW_INPUTFORMAT708=(Token)match(input,KW_INPUTFORMAT,FOLLOW_KW_INPUTFORMAT_in_tableFileFormat10830);  
                    stream_KW_INPUTFORMAT.add(KW_INPUTFORMAT708);


                    inFmt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableFileFormat10834);  
                    stream_StringLiteral.add(inFmt);


                    KW_OUTPUTFORMAT709=(Token)match(input,KW_OUTPUTFORMAT,FOLLOW_KW_OUTPUTFORMAT_in_tableFileFormat10836);  
                    stream_KW_OUTPUTFORMAT.add(KW_OUTPUTFORMAT709);


                    outFmt=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableFileFormat10840);  
                    stream_StringLiteral.add(outFmt);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1835:95: ( KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral )?
                    int alt201=2;
                    switch ( input.LA(1) ) {
                        case KW_INPUTDRIVER:
                            {
                            alt201=1;
                            }
                            break;
                    }

                    switch (alt201) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1835:96: KW_INPUTDRIVER inDriver= StringLiteral KW_OUTPUTDRIVER outDriver= StringLiteral
                            {
                            KW_INPUTDRIVER710=(Token)match(input,KW_INPUTDRIVER,FOLLOW_KW_INPUTDRIVER_in_tableFileFormat10843);  
                            stream_KW_INPUTDRIVER.add(KW_INPUTDRIVER710);


                            inDriver=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableFileFormat10847);  
                            stream_StringLiteral.add(inDriver);


                            KW_OUTPUTDRIVER711=(Token)match(input,KW_OUTPUTDRIVER,FOLLOW_KW_OUTPUTDRIVER_in_tableFileFormat10849);  
                            stream_KW_OUTPUTDRIVER.add(KW_OUTPUTDRIVER711);


                            outDriver=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableFileFormat10853);  
                            stream_StringLiteral.add(outDriver);


                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: inFmt, outDriver, outFmt, inDriver
                    // token labels: outDriver, outFmt, inDriver, inFmt
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleTokenStream stream_outDriver=new RewriteRuleTokenStream(adaptor,"token outDriver",outDriver);
                    RewriteRuleTokenStream stream_outFmt=new RewriteRuleTokenStream(adaptor,"token outFmt",outFmt);
                    RewriteRuleTokenStream stream_inDriver=new RewriteRuleTokenStream(adaptor,"token inDriver",inDriver);
                    RewriteRuleTokenStream stream_inFmt=new RewriteRuleTokenStream(adaptor,"token inFmt",inFmt);
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1836:7: -> ^( TOK_TABLEFILEFORMAT $inFmt $outFmt ( $inDriver)? ( $outDriver)? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1836:10: ^( TOK_TABLEFILEFORMAT $inFmt $outFmt ( $inDriver)? ( $outDriver)? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_TABLEFILEFORMAT, "TOK_TABLEFILEFORMAT")
                        , root_1);

                        adaptor.addChild(root_1, stream_inFmt.nextNode());

                        adaptor.addChild(root_1, stream_outFmt.nextNode());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1836:48: ( $inDriver)?
                        if ( stream_inDriver.hasNext() ) {
                            adaptor.addChild(root_1, stream_inDriver.nextNode());

                        }
                        stream_inDriver.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1836:59: ( $outDriver)?
                        if ( stream_outDriver.hasNext() ) {
                            adaptor.addChild(root_1, stream_outDriver.nextNode());

                        }
                        stream_outDriver.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1837:9: KW_STORED KW_BY storageHandler= StringLiteral ( KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties )?
                    {
                    KW_STORED712=(Token)match(input,KW_STORED,FOLLOW_KW_STORED_in_tableFileFormat10891);  
                    stream_KW_STORED.add(KW_STORED712);


                    KW_BY713=(Token)match(input,KW_BY,FOLLOW_KW_BY_in_tableFileFormat10893);  
                    stream_KW_BY.add(KW_BY713);


                    storageHandler=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableFileFormat10897);  
                    stream_StringLiteral.add(storageHandler);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1838:10: ( KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties )?
                    int alt202=2;
                    switch ( input.LA(1) ) {
                        case KW_WITH:
                            {
                            alt202=1;
                            }
                            break;
                    }

                    switch (alt202) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1838:11: KW_WITH KW_SERDEPROPERTIES serdeprops= tableProperties
                            {
                            KW_WITH714=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_tableFileFormat10909);  
                            stream_KW_WITH.add(KW_WITH714);


                            KW_SERDEPROPERTIES715=(Token)match(input,KW_SERDEPROPERTIES,FOLLOW_KW_SERDEPROPERTIES_in_tableFileFormat10911);  
                            stream_KW_SERDEPROPERTIES.add(KW_SERDEPROPERTIES715);


                            pushFollow(FOLLOW_tableProperties_in_tableFileFormat10915);
                            serdeprops=tableProperties();

                            state._fsp--;

                            stream_tableProperties.add(serdeprops.getTree());

                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: storageHandler, serdeprops
                    // token labels: storageHandler
                    // rule labels: serdeprops, retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleTokenStream stream_storageHandler=new RewriteRuleTokenStream(adaptor,"token storageHandler",storageHandler);
                    RewriteRuleSubtreeStream stream_serdeprops=new RewriteRuleSubtreeStream(adaptor,"rule serdeprops",serdeprops!=null?serdeprops.tree:null);
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1839:7: -> ^( TOK_STORAGEHANDLER $storageHandler ( $serdeprops)? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1839:10: ^( TOK_STORAGEHANDLER $storageHandler ( $serdeprops)? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_STORAGEHANDLER, "TOK_STORAGEHANDLER")
                        , root_1);

                        adaptor.addChild(root_1, stream_storageHandler.nextNode());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1839:48: ( $serdeprops)?
                        if ( stream_serdeprops.hasNext() ) {
                            adaptor.addChild(root_1, stream_serdeprops.nextTree());

                        }
                        stream_serdeprops.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1840:9: KW_STORED KW_AS genericSpec= identifier
                    {
                    KW_STORED716=(Token)match(input,KW_STORED,FOLLOW_KW_STORED_in_tableFileFormat10946);  
                    stream_KW_STORED.add(KW_STORED716);


                    KW_AS717=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_tableFileFormat10948);  
                    stream_KW_AS.add(KW_AS717);


                    pushFollow(FOLLOW_identifier_in_tableFileFormat10952);
                    genericSpec=identifier();

                    state._fsp--;

                    stream_identifier.add(genericSpec.getTree());

                    // AST REWRITE
                    // elements: genericSpec
                    // token labels: 
                    // rule labels: retval, genericSpec
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    RewriteRuleSubtreeStream stream_genericSpec=new RewriteRuleSubtreeStream(adaptor,"rule genericSpec",genericSpec!=null?genericSpec.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1841:7: -> ^( TOK_FILEFORMAT_GENERIC $genericSpec)
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:1841:10: ^( TOK_FILEFORMAT_GENERIC $genericSpec)
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_FILEFORMAT_GENERIC, "TOK_FILEFORMAT_GENERIC")
                        , root_1);

                        adaptor.addChild(root_1, stream_genericSpec.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "tableFileFormat"


    public static class tableLocation_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "tableLocation"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1844:1: tableLocation : KW_LOCATION locn= StringLiteral -> ^( TOK_TABLELOCATION $locn) ;
    public final HiveParser.tableLocation_return tableLocation() throws RecognitionException {
        HiveParser.tableLocation_return retval = new HiveParser.tableLocation_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token locn=null;
        Token KW_LOCATION718=null;

        CommonTree locn_tree=null;
        CommonTree KW_LOCATION718_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_LOCATION=new RewriteRuleTokenStream(adaptor,"token KW_LOCATION");

         pushMsg("table location specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1847:5: ( KW_LOCATION locn= StringLiteral -> ^( TOK_TABLELOCATION $locn) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1848:7: KW_LOCATION locn= StringLiteral
            {
            KW_LOCATION718=(Token)match(input,KW_LOCATION,FOLLOW_KW_LOCATION_in_tableLocation11000);  
            stream_KW_LOCATION.add(KW_LOCATION718);


            locn=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_tableLocation11004);  
            stream_StringLiteral.add(locn);


            // AST REWRITE
            // elements: locn
            // token labels: locn
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_locn=new RewriteRuleTokenStream(adaptor,"token locn",locn);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1848:38: -> ^( TOK_TABLELOCATION $locn)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1848:41: ^( TOK_TABLELOCATION $locn)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABLELOCATION, "TOK_TABLELOCATION")
                , root_1);

                adaptor.addChild(root_1, stream_locn.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "tableLocation"


    public static class columnNameTypeList_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "columnNameTypeList"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1851:1: columnNameTypeList : columnNameType ( COMMA columnNameType )* -> ^( TOK_TABCOLLIST ( columnNameType )+ ) ;
    public final HiveParser.columnNameTypeList_return columnNameTypeList() throws RecognitionException {
        HiveParser.columnNameTypeList_return retval = new HiveParser.columnNameTypeList_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token COMMA720=null;
        HiveParser.columnNameType_return columnNameType719 =null;

        HiveParser.columnNameType_return columnNameType721 =null;


        CommonTree COMMA720_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleSubtreeStream stream_columnNameType=new RewriteRuleSubtreeStream(adaptor,"rule columnNameType");
         pushMsg("column name type list", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1854:5: ( columnNameType ( COMMA columnNameType )* -> ^( TOK_TABCOLLIST ( columnNameType )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1854:7: columnNameType ( COMMA columnNameType )*
            {
            pushFollow(FOLLOW_columnNameType_in_columnNameTypeList11040);
            columnNameType719=columnNameType();

            state._fsp--;

            stream_columnNameType.add(columnNameType719.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1854:22: ( COMMA columnNameType )*
            loop204:
            do {
                int alt204=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt204=1;
                    }
                    break;

                }

                switch (alt204) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1854:23: COMMA columnNameType
            	    {
            	    COMMA720=(Token)match(input,COMMA,FOLLOW_COMMA_in_columnNameTypeList11043);  
            	    stream_COMMA.add(COMMA720);


            	    pushFollow(FOLLOW_columnNameType_in_columnNameTypeList11045);
            	    columnNameType721=columnNameType();

            	    state._fsp--;

            	    stream_columnNameType.add(columnNameType721.getTree());

            	    }
            	    break;

            	default :
            	    break loop204;
                }
            } while (true);


            // AST REWRITE
            // elements: columnNameType
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1854:46: -> ^( TOK_TABCOLLIST ( columnNameType )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1854:49: ^( TOK_TABCOLLIST ( columnNameType )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABCOLLIST, "TOK_TABCOLLIST")
                , root_1);

                if ( !(stream_columnNameType.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_columnNameType.hasNext() ) {
                    adaptor.addChild(root_1, stream_columnNameType.nextTree());

                }
                stream_columnNameType.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "columnNameTypeList"


    public static class columnNameColonTypeList_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "columnNameColonTypeList"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1857:1: columnNameColonTypeList : columnNameColonType ( COMMA columnNameColonType )* -> ^( TOK_TABCOLLIST ( columnNameColonType )+ ) ;
    public final HiveParser.columnNameColonTypeList_return columnNameColonTypeList() throws RecognitionException {
        HiveParser.columnNameColonTypeList_return retval = new HiveParser.columnNameColonTypeList_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token COMMA723=null;
        HiveParser.columnNameColonType_return columnNameColonType722 =null;

        HiveParser.columnNameColonType_return columnNameColonType724 =null;


        CommonTree COMMA723_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleSubtreeStream stream_columnNameColonType=new RewriteRuleSubtreeStream(adaptor,"rule columnNameColonType");
         pushMsg("column name type list", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1860:5: ( columnNameColonType ( COMMA columnNameColonType )* -> ^( TOK_TABCOLLIST ( columnNameColonType )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1860:7: columnNameColonType ( COMMA columnNameColonType )*
            {
            pushFollow(FOLLOW_columnNameColonType_in_columnNameColonTypeList11083);
            columnNameColonType722=columnNameColonType();

            state._fsp--;

            stream_columnNameColonType.add(columnNameColonType722.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1860:27: ( COMMA columnNameColonType )*
            loop205:
            do {
                int alt205=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt205=1;
                    }
                    break;

                }

                switch (alt205) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1860:28: COMMA columnNameColonType
            	    {
            	    COMMA723=(Token)match(input,COMMA,FOLLOW_COMMA_in_columnNameColonTypeList11086);  
            	    stream_COMMA.add(COMMA723);


            	    pushFollow(FOLLOW_columnNameColonType_in_columnNameColonTypeList11088);
            	    columnNameColonType724=columnNameColonType();

            	    state._fsp--;

            	    stream_columnNameColonType.add(columnNameColonType724.getTree());

            	    }
            	    break;

            	default :
            	    break loop205;
                }
            } while (true);


            // AST REWRITE
            // elements: columnNameColonType
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1860:56: -> ^( TOK_TABCOLLIST ( columnNameColonType )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1860:59: ^( TOK_TABCOLLIST ( columnNameColonType )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABCOLLIST, "TOK_TABCOLLIST")
                , root_1);

                if ( !(stream_columnNameColonType.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_columnNameColonType.hasNext() ) {
                    adaptor.addChild(root_1, stream_columnNameColonType.nextTree());

                }
                stream_columnNameColonType.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "columnNameColonTypeList"


    public static class columnNameList_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "columnNameList"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1863:1: columnNameList : columnName ( COMMA columnName )* -> ^( TOK_TABCOLNAME ( columnName )+ ) ;
    public final HiveParser.columnNameList_return columnNameList() throws RecognitionException {
        HiveParser.columnNameList_return retval = new HiveParser.columnNameList_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token COMMA726=null;
        HiveParser.columnName_return columnName725 =null;

        HiveParser.columnName_return columnName727 =null;


        CommonTree COMMA726_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleSubtreeStream stream_columnName=new RewriteRuleSubtreeStream(adaptor,"rule columnName");
         pushMsg("column name list", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1866:5: ( columnName ( COMMA columnName )* -> ^( TOK_TABCOLNAME ( columnName )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1866:7: columnName ( COMMA columnName )*
            {
            pushFollow(FOLLOW_columnName_in_columnNameList11126);
            columnName725=columnName();

            state._fsp--;

            stream_columnName.add(columnName725.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1866:18: ( COMMA columnName )*
            loop206:
            do {
                int alt206=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt206=1;
                    }
                    break;

                }

                switch (alt206) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1866:19: COMMA columnName
            	    {
            	    COMMA726=(Token)match(input,COMMA,FOLLOW_COMMA_in_columnNameList11129);  
            	    stream_COMMA.add(COMMA726);


            	    pushFollow(FOLLOW_columnName_in_columnNameList11131);
            	    columnName727=columnName();

            	    state._fsp--;

            	    stream_columnName.add(columnName727.getTree());

            	    }
            	    break;

            	default :
            	    break loop206;
                }
            } while (true);


            // AST REWRITE
            // elements: columnName
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1866:38: -> ^( TOK_TABCOLNAME ( columnName )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1866:41: ^( TOK_TABCOLNAME ( columnName )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABCOLNAME, "TOK_TABCOLNAME")
                , root_1);

                if ( !(stream_columnName.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_columnName.hasNext() ) {
                    adaptor.addChild(root_1, stream_columnName.nextTree());

                }
                stream_columnName.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "columnNameList"


    public static class columnName_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "columnName"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1869:1: columnName : identifier ;
    public final HiveParser.columnName_return columnName() throws RecognitionException {
        HiveParser.columnName_return retval = new HiveParser.columnName_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser_IdentifiersParser.identifier_return identifier728 =null;



         pushMsg("column name", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1872:5: ( identifier )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1873:7: identifier
            {
            root_0 = (CommonTree)adaptor.nil();


            pushFollow(FOLLOW_identifier_in_columnName11175);
            identifier728=identifier();

            state._fsp--;

            adaptor.addChild(root_0, identifier728.getTree());

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "columnName"


    public static class columnNameOrderList_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "columnNameOrderList"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1876:1: columnNameOrderList : columnNameOrder ( COMMA columnNameOrder )* -> ^( TOK_TABCOLNAME ( columnNameOrder )+ ) ;
    public final HiveParser.columnNameOrderList_return columnNameOrderList() throws RecognitionException {
        HiveParser.columnNameOrderList_return retval = new HiveParser.columnNameOrderList_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token COMMA730=null;
        HiveParser.columnNameOrder_return columnNameOrder729 =null;

        HiveParser.columnNameOrder_return columnNameOrder731 =null;


        CommonTree COMMA730_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleSubtreeStream stream_columnNameOrder=new RewriteRuleSubtreeStream(adaptor,"rule columnNameOrder");
         pushMsg("column name order list", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1879:5: ( columnNameOrder ( COMMA columnNameOrder )* -> ^( TOK_TABCOLNAME ( columnNameOrder )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1879:7: columnNameOrder ( COMMA columnNameOrder )*
            {
            pushFollow(FOLLOW_columnNameOrder_in_columnNameOrderList11202);
            columnNameOrder729=columnNameOrder();

            state._fsp--;

            stream_columnNameOrder.add(columnNameOrder729.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1879:23: ( COMMA columnNameOrder )*
            loop207:
            do {
                int alt207=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt207=1;
                    }
                    break;

                }

                switch (alt207) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1879:24: COMMA columnNameOrder
            	    {
            	    COMMA730=(Token)match(input,COMMA,FOLLOW_COMMA_in_columnNameOrderList11205);  
            	    stream_COMMA.add(COMMA730);


            	    pushFollow(FOLLOW_columnNameOrder_in_columnNameOrderList11207);
            	    columnNameOrder731=columnNameOrder();

            	    state._fsp--;

            	    stream_columnNameOrder.add(columnNameOrder731.getTree());

            	    }
            	    break;

            	default :
            	    break loop207;
                }
            } while (true);


            // AST REWRITE
            // elements: columnNameOrder
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1879:48: -> ^( TOK_TABCOLNAME ( columnNameOrder )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1879:51: ^( TOK_TABCOLNAME ( columnNameOrder )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABCOLNAME, "TOK_TABCOLNAME")
                , root_1);

                if ( !(stream_columnNameOrder.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_columnNameOrder.hasNext() ) {
                    adaptor.addChild(root_1, stream_columnNameOrder.nextTree());

                }
                stream_columnNameOrder.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "columnNameOrderList"


    public static class skewedValueElement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "skewedValueElement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1882:1: skewedValueElement : ( skewedColumnValues | skewedColumnValuePairList );
    public final HiveParser.skewedValueElement_return skewedValueElement() throws RecognitionException {
        HiveParser.skewedValueElement_return retval = new HiveParser.skewedValueElement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.skewedColumnValues_return skewedColumnValues732 =null;

        HiveParser.skewedColumnValuePairList_return skewedColumnValuePairList733 =null;



         pushMsg("skewed value element", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1885:5: ( skewedColumnValues | skewedColumnValuePairList )
            int alt208=2;
            switch ( input.LA(1) ) {
            case BigintLiteral:
            case CharSetName:
            case DecimalLiteral:
            case KW_DATE:
            case KW_FALSE:
            case KW_TRUE:
            case Number:
            case SmallintLiteral:
            case StringLiteral:
            case TinyintLiteral:
                {
                alt208=1;
                }
                break;
            case LPAREN:
                {
                alt208=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 208, 0, input);

                throw nvae;

            }

            switch (alt208) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1886:7: skewedColumnValues
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_skewedColumnValues_in_skewedValueElement11252);
                    skewedColumnValues732=skewedColumnValues();

                    state._fsp--;

                    adaptor.addChild(root_0, skewedColumnValues732.getTree());

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1887:8: skewedColumnValuePairList
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_skewedColumnValuePairList_in_skewedValueElement11261);
                    skewedColumnValuePairList733=skewedColumnValuePairList();

                    state._fsp--;

                    adaptor.addChild(root_0, skewedColumnValuePairList733.getTree());

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "skewedValueElement"


    public static class skewedColumnValuePairList_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "skewedColumnValuePairList"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1890:1: skewedColumnValuePairList : skewedColumnValuePair ( COMMA skewedColumnValuePair )* -> ^( TOK_TABCOLVALUE_PAIR ( skewedColumnValuePair )+ ) ;
    public final HiveParser.skewedColumnValuePairList_return skewedColumnValuePairList() throws RecognitionException {
        HiveParser.skewedColumnValuePairList_return retval = new HiveParser.skewedColumnValuePairList_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token COMMA735=null;
        HiveParser.skewedColumnValuePair_return skewedColumnValuePair734 =null;

        HiveParser.skewedColumnValuePair_return skewedColumnValuePair736 =null;


        CommonTree COMMA735_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleSubtreeStream stream_skewedColumnValuePair=new RewriteRuleSubtreeStream(adaptor,"rule skewedColumnValuePair");
         pushMsg("column value pair list", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1893:5: ( skewedColumnValuePair ( COMMA skewedColumnValuePair )* -> ^( TOK_TABCOLVALUE_PAIR ( skewedColumnValuePair )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1893:7: skewedColumnValuePair ( COMMA skewedColumnValuePair )*
            {
            pushFollow(FOLLOW_skewedColumnValuePair_in_skewedColumnValuePairList11288);
            skewedColumnValuePair734=skewedColumnValuePair();

            state._fsp--;

            stream_skewedColumnValuePair.add(skewedColumnValuePair734.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1893:29: ( COMMA skewedColumnValuePair )*
            loop209:
            do {
                int alt209=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt209=1;
                    }
                    break;

                }

                switch (alt209) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1893:30: COMMA skewedColumnValuePair
            	    {
            	    COMMA735=(Token)match(input,COMMA,FOLLOW_COMMA_in_skewedColumnValuePairList11291);  
            	    stream_COMMA.add(COMMA735);


            	    pushFollow(FOLLOW_skewedColumnValuePair_in_skewedColumnValuePairList11293);
            	    skewedColumnValuePair736=skewedColumnValuePair();

            	    state._fsp--;

            	    stream_skewedColumnValuePair.add(skewedColumnValuePair736.getTree());

            	    }
            	    break;

            	default :
            	    break loop209;
                }
            } while (true);


            // AST REWRITE
            // elements: skewedColumnValuePair
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1893:60: -> ^( TOK_TABCOLVALUE_PAIR ( skewedColumnValuePair )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1893:63: ^( TOK_TABCOLVALUE_PAIR ( skewedColumnValuePair )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABCOLVALUE_PAIR, "TOK_TABCOLVALUE_PAIR")
                , root_1);

                if ( !(stream_skewedColumnValuePair.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_skewedColumnValuePair.hasNext() ) {
                    adaptor.addChild(root_1, stream_skewedColumnValuePair.nextTree());

                }
                stream_skewedColumnValuePair.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "skewedColumnValuePairList"


    public static class skewedColumnValuePair_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "skewedColumnValuePair"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1896:1: skewedColumnValuePair : LPAREN colValues= skewedColumnValues RPAREN -> ^( TOK_TABCOLVALUES $colValues) ;
    public final HiveParser.skewedColumnValuePair_return skewedColumnValuePair() throws RecognitionException {
        HiveParser.skewedColumnValuePair_return retval = new HiveParser.skewedColumnValuePair_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token LPAREN737=null;
        Token RPAREN738=null;
        HiveParser.skewedColumnValues_return colValues =null;


        CommonTree LPAREN737_tree=null;
        CommonTree RPAREN738_tree=null;
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleSubtreeStream stream_skewedColumnValues=new RewriteRuleSubtreeStream(adaptor,"rule skewedColumnValues");
         pushMsg("column value pair", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1899:5: ( LPAREN colValues= skewedColumnValues RPAREN -> ^( TOK_TABCOLVALUES $colValues) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1900:7: LPAREN colValues= skewedColumnValues RPAREN
            {
            LPAREN737=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_skewedColumnValuePair11338);  
            stream_LPAREN.add(LPAREN737);


            pushFollow(FOLLOW_skewedColumnValues_in_skewedColumnValuePair11342);
            colValues=skewedColumnValues();

            state._fsp--;

            stream_skewedColumnValues.add(colValues.getTree());

            RPAREN738=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_skewedColumnValuePair11344);  
            stream_RPAREN.add(RPAREN738);


            // AST REWRITE
            // elements: colValues
            // token labels: 
            // rule labels: retval, colValues
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_colValues=new RewriteRuleSubtreeStream(adaptor,"rule colValues",colValues!=null?colValues.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1901:7: -> ^( TOK_TABCOLVALUES $colValues)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1901:10: ^( TOK_TABCOLVALUES $colValues)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABCOLVALUES, "TOK_TABCOLVALUES")
                , root_1);

                adaptor.addChild(root_1, stream_colValues.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "skewedColumnValuePair"


    public static class skewedColumnValues_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "skewedColumnValues"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1904:1: skewedColumnValues : skewedColumnValue ( COMMA skewedColumnValue )* -> ^( TOK_TABCOLVALUE ( skewedColumnValue )+ ) ;
    public final HiveParser.skewedColumnValues_return skewedColumnValues() throws RecognitionException {
        HiveParser.skewedColumnValues_return retval = new HiveParser.skewedColumnValues_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token COMMA740=null;
        HiveParser.skewedColumnValue_return skewedColumnValue739 =null;

        HiveParser.skewedColumnValue_return skewedColumnValue741 =null;


        CommonTree COMMA740_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleSubtreeStream stream_skewedColumnValue=new RewriteRuleSubtreeStream(adaptor,"rule skewedColumnValue");
         pushMsg("column values", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1907:5: ( skewedColumnValue ( COMMA skewedColumnValue )* -> ^( TOK_TABCOLVALUE ( skewedColumnValue )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1907:7: skewedColumnValue ( COMMA skewedColumnValue )*
            {
            pushFollow(FOLLOW_skewedColumnValue_in_skewedColumnValues11387);
            skewedColumnValue739=skewedColumnValue();

            state._fsp--;

            stream_skewedColumnValue.add(skewedColumnValue739.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1907:25: ( COMMA skewedColumnValue )*
            loop210:
            do {
                int alt210=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt210=1;
                    }
                    break;

                }

                switch (alt210) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1907:26: COMMA skewedColumnValue
            	    {
            	    COMMA740=(Token)match(input,COMMA,FOLLOW_COMMA_in_skewedColumnValues11390);  
            	    stream_COMMA.add(COMMA740);


            	    pushFollow(FOLLOW_skewedColumnValue_in_skewedColumnValues11392);
            	    skewedColumnValue741=skewedColumnValue();

            	    state._fsp--;

            	    stream_skewedColumnValue.add(skewedColumnValue741.getTree());

            	    }
            	    break;

            	default :
            	    break loop210;
                }
            } while (true);


            // AST REWRITE
            // elements: skewedColumnValue
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1907:52: -> ^( TOK_TABCOLVALUE ( skewedColumnValue )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1907:55: ^( TOK_TABCOLVALUE ( skewedColumnValue )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABCOLVALUE, "TOK_TABCOLVALUE")
                , root_1);

                if ( !(stream_skewedColumnValue.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_skewedColumnValue.hasNext() ) {
                    adaptor.addChild(root_1, stream_skewedColumnValue.nextTree());

                }
                stream_skewedColumnValue.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "skewedColumnValues"


    public static class skewedColumnValue_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "skewedColumnValue"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1910:1: skewedColumnValue : constant ;
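    // Note: skewedColumnValue is a thin wrapper around the constant rule, so any
    // literal accepted there (string, numeric, boolean, date, ...) is a valid
    // skewed value; no rewrite is applied and the constant subtree passes through.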
    public final HiveParser.skewedColumnValue_return skewedColumnValue() throws RecognitionException {
        HiveParser.skewedColumnValue_return retval = new HiveParser.skewedColumnValue_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser_IdentifiersParser.constant_return constant742 =null;



         pushMsg("column value", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1913:5: ( constant )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1914:7: constant
            {
            root_0 = (CommonTree)adaptor.nil();


            pushFollow(FOLLOW_constant_in_skewedColumnValue11436);
            constant742=constant();

            state._fsp--;

            adaptor.addChild(root_0, constant742.getTree());

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "skewedColumnValue"


    public static class skewedValueLocationElement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "skewedValueLocationElement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1917:1: skewedValueLocationElement : ( skewedColumnValue | skewedColumnValuePair );
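    // Illustrative note (assumed context, e.g. ALTER TABLE ... SET SKEWED LOCATION):
    // each element may be either a single constant (skewedColumnValue) or a
    // parenthesized tuple (skewedColumnValuePair); the single-token lookahead on
    // LPAREN below is what selects between the two alternatives.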
    public final HiveParser.skewedValueLocationElement_return skewedValueLocationElement() throws RecognitionException {
        HiveParser.skewedValueLocationElement_return retval = new HiveParser.skewedValueLocationElement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.skewedColumnValue_return skewedColumnValue743 =null;

        HiveParser.skewedColumnValuePair_return skewedColumnValuePair744 =null;



         pushMsg("skewed value location element", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1920:5: ( skewedColumnValue | skewedColumnValuePair )
            int alt211=2;
            switch ( input.LA(1) ) {
            case BigintLiteral:
            case CharSetName:
            case DecimalLiteral:
            case KW_DATE:
            case KW_FALSE:
            case KW_TRUE:
            case Number:
            case SmallintLiteral:
            case StringLiteral:
            case TinyintLiteral:
                {
                alt211=1;
                }
                break;
            case LPAREN:
                {
                alt211=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 211, 0, input);

                throw nvae;

            }

            switch (alt211) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1921:7: skewedColumnValue
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_skewedColumnValue_in_skewedValueLocationElement11470);
                    skewedColumnValue743=skewedColumnValue();

                    state._fsp--;

                    adaptor.addChild(root_0, skewedColumnValue743.getTree());

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1922:8: skewedColumnValuePair
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_skewedColumnValuePair_in_skewedValueLocationElement11479);
                    skewedColumnValuePair744=skewedColumnValuePair();

                    state._fsp--;

                    adaptor.addChild(root_0, skewedColumnValuePair744.getTree());

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "skewedValueLocationElement"


    public static class columnNameOrder_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "columnNameOrder"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1925:1: columnNameOrder : identifier (asc= KW_ASC |desc= KW_DESC )? -> {$desc == null}? ^( TOK_TABSORTCOLNAMEASC identifier ) -> ^( TOK_TABSORTCOLNAMEDESC identifier ) ;
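    // Illustrative note (assumed usage, e.g. a SORTED BY (...) clause): parses
    // entries such as "c1 ASC" or "c2 DESC"; when neither keyword is present the
    // rewrite below defaults to ^(TOK_TABSORTCOLNAMEASC identifier).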
    public final HiveParser.columnNameOrder_return columnNameOrder() throws RecognitionException {
        HiveParser.columnNameOrder_return retval = new HiveParser.columnNameOrder_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token asc=null;
        Token desc=null;
        HiveParser_IdentifiersParser.identifier_return identifier745 =null;


        CommonTree asc_tree=null;
        CommonTree desc_tree=null;
        RewriteRuleTokenStream stream_KW_DESC=new RewriteRuleTokenStream(adaptor,"token KW_DESC");
        RewriteRuleTokenStream stream_KW_ASC=new RewriteRuleTokenStream(adaptor,"token KW_ASC");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("column name order", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1928:5: ( identifier (asc= KW_ASC |desc= KW_DESC )? -> {$desc == null}? ^( TOK_TABSORTCOLNAMEASC identifier ) -> ^( TOK_TABSORTCOLNAMEDESC identifier ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1928:7: identifier (asc= KW_ASC |desc= KW_DESC )?
            {
            pushFollow(FOLLOW_identifier_in_columnNameOrder11510);
            identifier745=identifier();

            state._fsp--;

            stream_identifier.add(identifier745.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1928:18: (asc= KW_ASC |desc= KW_DESC )?
            int alt212=3;
            switch ( input.LA(1) ) {
                case KW_ASC:
                    {
                    alt212=1;
                    }
                    break;
                case KW_DESC:
                    {
                    alt212=2;
                    }
                    break;
            }

            switch (alt212) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1928:19: asc= KW_ASC
                    {
                    asc=(Token)match(input,KW_ASC,FOLLOW_KW_ASC_in_columnNameOrder11515);  
                    stream_KW_ASC.add(asc);


                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1928:32: desc= KW_DESC
                    {
                    desc=(Token)match(input,KW_DESC,FOLLOW_KW_DESC_in_columnNameOrder11521);  
                    stream_KW_DESC.add(desc);


                    }
                    break;

            }


            // AST REWRITE
            // elements: identifier, identifier
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1929:5: -> {$desc == null}? ^( TOK_TABSORTCOLNAMEASC identifier )
            if (desc == null) {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1929:25: ^( TOK_TABSORTCOLNAMEASC identifier )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABSORTCOLNAMEASC, "TOK_TABSORTCOLNAMEASC")
                , root_1);

                adaptor.addChild(root_1, stream_identifier.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }

            else // 1930:5: -> ^( TOK_TABSORTCOLNAMEDESC identifier )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1930:25: ^( TOK_TABSORTCOLNAMEDESC identifier )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABSORTCOLNAMEDESC, "TOK_TABSORTCOLNAMEDESC")
                , root_1);

                adaptor.addChild(root_1, stream_identifier.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "columnNameOrder"


    public static class columnNameCommentList_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "columnNameCommentList"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1933:1: columnNameCommentList : columnNameComment ( COMMA columnNameComment )* -> ^( TOK_TABCOLNAME ( columnNameComment )+ ) ;
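    // Illustrative note (assumed usage, e.g. the column list of a CREATE VIEW):
    // parses a list of the form (c1 COMMENT 'first', c2, c3 COMMENT 'third') and
    // collects the entries under one ^(TOK_TABCOLNAME columnNameComment+) node.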
    public final HiveParser.columnNameCommentList_return columnNameCommentList() throws RecognitionException {
        HiveParser.columnNameCommentList_return retval = new HiveParser.columnNameCommentList_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token COMMA747=null;
        HiveParser.columnNameComment_return columnNameComment746 =null;

        HiveParser.columnNameComment_return columnNameComment748 =null;


        CommonTree COMMA747_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleSubtreeStream stream_columnNameComment=new RewriteRuleSubtreeStream(adaptor,"rule columnNameComment");
         pushMsg("column name comment list", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1936:5: ( columnNameComment ( COMMA columnNameComment )* -> ^( TOK_TABCOLNAME ( columnNameComment )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1936:7: columnNameComment ( COMMA columnNameComment )*
            {
            pushFollow(FOLLOW_columnNameComment_in_columnNameCommentList11593);
            columnNameComment746=columnNameComment();

            state._fsp--;

            stream_columnNameComment.add(columnNameComment746.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1936:25: ( COMMA columnNameComment )*
            loop213:
            do {
                int alt213=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt213=1;
                    }
                    break;

                }

                switch (alt213) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1936:26: COMMA columnNameComment
            	    {
            	    COMMA747=(Token)match(input,COMMA,FOLLOW_COMMA_in_columnNameCommentList11596);  
            	    stream_COMMA.add(COMMA747);


            	    pushFollow(FOLLOW_columnNameComment_in_columnNameCommentList11598);
            	    columnNameComment748=columnNameComment();

            	    state._fsp--;

            	    stream_columnNameComment.add(columnNameComment748.getTree());

            	    }
            	    break;

            	default :
            	    break loop213;
                }
            } while (true);


            // AST REWRITE
            // elements: columnNameComment
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1936:52: -> ^( TOK_TABCOLNAME ( columnNameComment )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1936:55: ^( TOK_TABCOLNAME ( columnNameComment )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABCOLNAME, "TOK_TABCOLNAME")
                , root_1);

                if ( !(stream_columnNameComment.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_columnNameComment.hasNext() ) {
                    adaptor.addChild(root_1, stream_columnNameComment.nextTree());

                }
                stream_columnNameComment.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "columnNameCommentList"


    public static class columnNameComment_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "columnNameComment"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1939:1: columnNameComment : colName= identifier ( KW_COMMENT comment= StringLiteral )? -> ^( TOK_TABCOL $colName TOK_NULL ( $comment)? ) ;
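    // Illustrative example: "c1 COMMENT 'first column'" is rewritten to
    // ^(TOK_TABCOL c1 TOK_NULL 'first column'); the TOK_NULL placeholder sits where
    // a column type would go, apparently so the subtree shape matches columnNameType.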
    public final HiveParser.columnNameComment_return columnNameComment() throws RecognitionException {
        HiveParser.columnNameComment_return retval = new HiveParser.columnNameComment_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token comment=null;
        Token KW_COMMENT749=null;
        HiveParser_IdentifiersParser.identifier_return colName =null;


        CommonTree comment_tree=null;
        CommonTree KW_COMMENT749_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("column name comment", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1942:5: (colName= identifier ( KW_COMMENT comment= StringLiteral )? -> ^( TOK_TABCOL $colName TOK_NULL ( $comment)? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1942:7: colName= identifier ( KW_COMMENT comment= StringLiteral )?
            {
            pushFollow(FOLLOW_identifier_in_columnNameComment11638);
            colName=identifier();

            state._fsp--;

            stream_identifier.add(colName.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1942:26: ( KW_COMMENT comment= StringLiteral )?
            int alt214=2;
            switch ( input.LA(1) ) {
                case KW_COMMENT:
                    {
                    alt214=1;
                    }
                    break;
            }

            switch (alt214) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1942:27: KW_COMMENT comment= StringLiteral
                    {
                    KW_COMMENT749=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_columnNameComment11641);  
                    stream_KW_COMMENT.add(KW_COMMENT749);


                    comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_columnNameComment11645);  
                    stream_StringLiteral.add(comment);


                    }
                    break;

            }


            // AST REWRITE
            // elements: comment, colName
            // token labels: comment
            // rule labels: retval, colName
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_colName=new RewriteRuleSubtreeStream(adaptor,"rule colName",colName!=null?colName.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1943:5: -> ^( TOK_TABCOL $colName TOK_NULL ( $comment)? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1943:8: ^( TOK_TABCOL $colName TOK_NULL ( $comment)? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABCOL, "TOK_TABCOL")
                , root_1);

                adaptor.addChild(root_1, stream_colName.nextTree());

                adaptor.addChild(root_1, 
                (CommonTree)adaptor.create(TOK_NULL, "TOK_NULL")
                );

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1943:40: ( $comment)?
                if ( stream_comment.hasNext() ) {
                    adaptor.addChild(root_1, stream_comment.nextNode());

                }
                stream_comment.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "columnNameComment"


    public static class columnRefOrder_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "columnRefOrder"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1946:1: columnRefOrder : expression (asc= KW_ASC |desc= KW_DESC )? -> {$desc == null}? ^( TOK_TABSORTCOLNAMEASC expression ) -> ^( TOK_TABSORTCOLNAMEDESC expression ) ;
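    // Illustrative note (assumed usage, e.g. an ORDER BY or SORT BY list): same
    // ASC/DESC handling as columnNameOrder, but over a full expression rather than
    // a bare identifier, e.g. "col + 1 DESC"; ascending is again the default when
    // no keyword follows.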
    public final HiveParser.columnRefOrder_return columnRefOrder() throws RecognitionException {
        HiveParser.columnRefOrder_return retval = new HiveParser.columnRefOrder_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token asc=null;
        Token desc=null;
        HiveParser_IdentifiersParser.expression_return expression750 =null;


        CommonTree asc_tree=null;
        CommonTree desc_tree=null;
        RewriteRuleTokenStream stream_KW_DESC=new RewriteRuleTokenStream(adaptor,"token KW_DESC");
        RewriteRuleTokenStream stream_KW_ASC=new RewriteRuleTokenStream(adaptor,"token KW_ASC");
        RewriteRuleSubtreeStream stream_expression=new RewriteRuleSubtreeStream(adaptor,"rule expression");
         pushMsg("column order", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1949:5: ( expression (asc= KW_ASC |desc= KW_DESC )? -> {$desc == null}? ^( TOK_TABSORTCOLNAMEASC expression ) -> ^( TOK_TABSORTCOLNAMEDESC expression ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1949:7: expression (asc= KW_ASC |desc= KW_DESC )?
            {
            pushFollow(FOLLOW_expression_in_columnRefOrder11693);
            expression750=expression();

            state._fsp--;

            stream_expression.add(expression750.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1949:18: (asc= KW_ASC |desc= KW_DESC )?
            int alt215=3;
            switch ( input.LA(1) ) {
                case KW_ASC:
                    {
                    alt215=1;
                    }
                    break;
                case KW_DESC:
                    {
                    alt215=2;
                    }
                    break;
            }

            switch (alt215) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1949:19: asc= KW_ASC
                    {
                    asc=(Token)match(input,KW_ASC,FOLLOW_KW_ASC_in_columnRefOrder11698);  
                    stream_KW_ASC.add(asc);


                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1949:32: desc= KW_DESC
                    {
                    desc=(Token)match(input,KW_DESC,FOLLOW_KW_DESC_in_columnRefOrder11704);  
                    stream_KW_DESC.add(desc);


                    }
                    break;

            }


            // AST REWRITE
            // elements: expression, expression
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1950:5: -> {$desc == null}? ^( TOK_TABSORTCOLNAMEASC expression )
            if (desc == null) {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1950:25: ^( TOK_TABSORTCOLNAMEASC expression )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABSORTCOLNAMEASC, "TOK_TABSORTCOLNAMEASC")
                , root_1);

                adaptor.addChild(root_1, stream_expression.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }

            else // 1951:5: -> ^( TOK_TABSORTCOLNAMEDESC expression )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1951:25: ^( TOK_TABSORTCOLNAMEDESC expression )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABSORTCOLNAMEDESC, "TOK_TABSORTCOLNAMEDESC")
                , root_1);

                adaptor.addChild(root_1, stream_expression.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "columnRefOrder"


    public static class columnNameType_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "columnNameType"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1954:1: columnNameType : colName= identifier colType ( KW_COMMENT comment= StringLiteral )? -> {$comment == null}? ^( TOK_TABCOL $colName colType ) -> ^( TOK_TABCOL $colName colType $comment) ;
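    // Illustrative example (assumed usage in a CREATE TABLE column list): a
    // specification such as "id INT COMMENT 'primary key'" is rewritten to
    // ^(TOK_TABCOL id TOK_INT 'primary key'), with the comment child omitted when
    // no COMMENT clause is present.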
    public final HiveParser.columnNameType_return columnNameType() throws RecognitionException {
        HiveParser.columnNameType_return retval = new HiveParser.columnNameType_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token comment=null;
        Token KW_COMMENT752=null;
        HiveParser_IdentifiersParser.identifier_return colName =null;

        HiveParser.colType_return colType751 =null;


        CommonTree comment_tree=null;
        CommonTree KW_COMMENT752_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");
        RewriteRuleSubtreeStream stream_colType=new RewriteRuleSubtreeStream(adaptor,"rule colType");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("column specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1957:5: (colName= identifier colType ( KW_COMMENT comment= StringLiteral )? -> {$comment == null}? ^( TOK_TABCOL $colName colType ) -> ^( TOK_TABCOL $colName colType $comment) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1957:7: colName= identifier colType ( KW_COMMENT comment= StringLiteral )?
            {
            pushFollow(FOLLOW_identifier_in_columnNameType11778);
            colName=identifier();

            state._fsp--;

            stream_identifier.add(colName.getTree());

            pushFollow(FOLLOW_colType_in_columnNameType11780);
            colType751=colType();

            state._fsp--;

            stream_colType.add(colType751.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1957:34: ( KW_COMMENT comment= StringLiteral )?
            int alt216=2;
            switch ( input.LA(1) ) {
                case KW_COMMENT:
                    {
                    alt216=1;
                    }
                    break;
            }

            switch (alt216) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1957:35: KW_COMMENT comment= StringLiteral
                    {
                    KW_COMMENT752=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_columnNameType11783);  
                    stream_KW_COMMENT.add(KW_COMMENT752);


                    comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_columnNameType11787);  
                    stream_StringLiteral.add(comment);


                    }
                    break;

            }


            // AST REWRITE
            // elements: colName, comment, colType, colType, colName
            // token labels: comment
            // rule labels: retval, colName
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_colName=new RewriteRuleSubtreeStream(adaptor,"rule colName",colName!=null?colName.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1958:5: -> {$comment == null}? ^( TOK_TABCOL $colName colType )
            if (comment == null) {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1958:28: ^( TOK_TABCOL $colName colType )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABCOL, "TOK_TABCOL")
                , root_1);

                adaptor.addChild(root_1, stream_colName.nextTree());

                adaptor.addChild(root_1, stream_colType.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }

            else // 1959:5: -> ^( TOK_TABCOL $colName colType $comment)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1959:28: ^( TOK_TABCOL $colName colType $comment)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABCOL, "TOK_TABCOL")
                , root_1);

                adaptor.addChild(root_1, stream_colName.nextTree());

                adaptor.addChild(root_1, stream_colType.nextTree());

                adaptor.addChild(root_1, stream_comment.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "columnNameType"


    public static class columnNameColonType_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "columnNameColonType"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1962:1: columnNameColonType : colName= identifier COLON colType ( KW_COMMENT comment= StringLiteral )? -> {$comment == null}? ^( TOK_TABCOL $colName colType ) -> ^( TOK_TABCOL $colName colType $comment) ;
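    // Illustrative note: identical to columnNameType except for the mandatory COLON,
    // matching struct field declarations such as name:STRING inside
    // STRUCT< name:STRING, age:INT > (assumed context).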
    public final HiveParser.columnNameColonType_return columnNameColonType() throws RecognitionException {
        HiveParser.columnNameColonType_return retval = new HiveParser.columnNameColonType_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token comment=null;
        Token COLON753=null;
        Token KW_COMMENT755=null;
        HiveParser_IdentifiersParser.identifier_return colName =null;

        HiveParser.colType_return colType754 =null;


        CommonTree comment_tree=null;
        CommonTree COLON753_tree=null;
        CommonTree KW_COMMENT755_tree=null;
        RewriteRuleTokenStream stream_COLON=new RewriteRuleTokenStream(adaptor,"token COLON");
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_COMMENT=new RewriteRuleTokenStream(adaptor,"token KW_COMMENT");
        RewriteRuleSubtreeStream stream_colType=new RewriteRuleSubtreeStream(adaptor,"rule colType");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
         pushMsg("column specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1965:5: (colName= identifier COLON colType ( KW_COMMENT comment= StringLiteral )? -> {$comment == null}? ^( TOK_TABCOL $colName colType ) -> ^( TOK_TABCOL $colName colType $comment) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1965:7: colName= identifier COLON colType ( KW_COMMENT comment= StringLiteral )?
            {
            pushFollow(FOLLOW_identifier_in_columnNameColonType11873);
            colName=identifier();

            state._fsp--;

            stream_identifier.add(colName.getTree());

            COLON753=(Token)match(input,COLON,FOLLOW_COLON_in_columnNameColonType11875);  
            stream_COLON.add(COLON753);


            pushFollow(FOLLOW_colType_in_columnNameColonType11877);
            colType754=colType();

            state._fsp--;

            stream_colType.add(colType754.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1965:40: ( KW_COMMENT comment= StringLiteral )?
            int alt217=2;
            switch ( input.LA(1) ) {
                case KW_COMMENT:
                    {
                    alt217=1;
                    }
                    break;
            }

            switch (alt217) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1965:41: KW_COMMENT comment= StringLiteral
                    {
                    KW_COMMENT755=(Token)match(input,KW_COMMENT,FOLLOW_KW_COMMENT_in_columnNameColonType11880);  
                    stream_KW_COMMENT.add(KW_COMMENT755);


                    comment=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_columnNameColonType11884);  
                    stream_StringLiteral.add(comment);


                    }
                    break;

            }


            // AST REWRITE
            // elements: colType, colType, colName, comment, colName
            // token labels: comment
            // rule labels: retval, colName
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_comment=new RewriteRuleTokenStream(adaptor,"token comment",comment);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_colName=new RewriteRuleSubtreeStream(adaptor,"rule colName",colName!=null?colName.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1966:5: -> {$comment == null}? ^( TOK_TABCOL $colName colType )
            if (comment == null) {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1966:28: ^( TOK_TABCOL $colName colType )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABCOL, "TOK_TABCOL")
                , root_1);

                adaptor.addChild(root_1, stream_colName.nextTree());

                adaptor.addChild(root_1, stream_colType.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }

            else // 1967:5: -> ^( TOK_TABCOL $colName colType $comment)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1967:28: ^( TOK_TABCOL $colName colType $comment)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_TABCOL, "TOK_TABCOL")
                , root_1);

                adaptor.addChild(root_1, stream_colName.nextTree());

                adaptor.addChild(root_1, stream_colType.nextTree());

                adaptor.addChild(root_1, stream_comment.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "columnNameColonType"


    public static class colType_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "colType"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1970:1: colType : type ;
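    // Note: colType simply delegates to the type rule; it apparently exists so that
    // callers such as columnNameType and colTypeList share one entry point and one
    // error message ("column type") for type parsing.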
    public final HiveParser.colType_return colType() throws RecognitionException {
        HiveParser.colType_return retval = new HiveParser.colType_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.type_return type756 =null;



         pushMsg("column type", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1973:5: ( type )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1973:7: type
            {
            root_0 = (CommonTree)adaptor.nil();


            pushFollow(FOLLOW_type_in_colType11968);
            type756=type();

            state._fsp--;

            adaptor.addChild(root_0, type756.getTree());

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "colType"


    public static class colTypeList_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "colTypeList"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1976:1: colTypeList : colType ( COMMA colType )* -> ^( TOK_COLTYPELIST ( colType )+ ) ;
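    // Illustrative example: "int, string, map<string,int>" is parsed as a
    // comma-separated list of colType and rewritten to ^(TOK_COLTYPELIST type+),
    // e.g. for UNIONTYPE<...> declarations (assumed context).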
    public final HiveParser.colTypeList_return colTypeList() throws RecognitionException {
        HiveParser.colTypeList_return retval = new HiveParser.colTypeList_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token COMMA758=null;
        HiveParser.colType_return colType757 =null;

        HiveParser.colType_return colType759 =null;


        CommonTree COMMA758_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleSubtreeStream stream_colType=new RewriteRuleSubtreeStream(adaptor,"rule colType");
         pushMsg("column type list", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1979:5: ( colType ( COMMA colType )* -> ^( TOK_COLTYPELIST ( colType )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1979:7: colType ( COMMA colType )*
            {
            pushFollow(FOLLOW_colType_in_colTypeList11995);
            colType757=colType();

            state._fsp--;

            stream_colType.add(colType757.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1979:15: ( COMMA colType )*
            loop218:
            do {
                int alt218=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt218=1;
                    }
                    break;

                }

                switch (alt218) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1979:16: COMMA colType
            	    {
            	    COMMA758=(Token)match(input,COMMA,FOLLOW_COMMA_in_colTypeList11998);  
            	    stream_COMMA.add(COMMA758);


            	    pushFollow(FOLLOW_colType_in_colTypeList12000);
            	    colType759=colType();

            	    state._fsp--;

            	    stream_colType.add(colType759.getTree());

            	    }
            	    break;

            	default :
            	    break loop218;
                }
            } while (true);


            // AST REWRITE
            // elements: colType
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 1979:32: -> ^( TOK_COLTYPELIST ( colType )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:1979:35: ^( TOK_COLTYPELIST ( colType )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_COLTYPELIST, "TOK_COLTYPELIST")
                , root_1);

                if ( !(stream_colType.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_colType.hasNext() ) {
                    adaptor.addChild(root_1, stream_colType.nextTree());

                }
                stream_colType.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "colTypeList"


    public static class type_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "type"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1982:1: type : ( primitiveType | listType | structType | mapType | unionType );
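    // The single-token lookahead below dispatches on the first keyword: primitive
    // type keywords (KW_TINYINT .. KW_VARCHAR) select primitiveType, while
    // KW_ARRAY, KW_STRUCT, KW_MAP and KW_UNIONTYPE select the corresponding
    // complex-type rule.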
    public final HiveParser.type_return type() throws RecognitionException {
        HiveParser.type_return retval = new HiveParser.type_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.primitiveType_return primitiveType760 =null;

        HiveParser.listType_return listType761 =null;

        HiveParser.structType_return structType762 =null;

        HiveParser.mapType_return mapType763 =null;

        HiveParser.unionType_return unionType764 =null;



        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1983:5: ( primitiveType | listType | structType | mapType | unionType )
            int alt219=5;
            switch ( input.LA(1) ) {
            case KW_BIGINT:
            case KW_BINARY:
            case KW_BOOLEAN:
            case KW_CHAR:
            case KW_DATE:
            case KW_DATETIME:
            case KW_DECIMAL:
            case KW_DOUBLE:
            case KW_FLOAT:
            case KW_INT:
            case KW_SMALLINT:
            case KW_STRING:
            case KW_TIMESTAMP:
            case KW_TINYINT:
            case KW_VARCHAR:
                {
                alt219=1;
                }
                break;
            case KW_ARRAY:
                {
                alt219=2;
                }
                break;
            case KW_STRUCT:
                {
                alt219=3;
                }
                break;
            case KW_MAP:
                {
                alt219=4;
                }
                break;
            case KW_UNIONTYPE:
                {
                alt219=5;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 219, 0, input);

                throw nvae;

            }

            switch (alt219) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1983:7: primitiveType
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_primitiveType_in_type12028);
                    primitiveType760=primitiveType();

                    state._fsp--;

                    adaptor.addChild(root_0, primitiveType760.getTree());

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1984:7: listType
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_listType_in_type12036);
                    listType761=listType();

                    state._fsp--;

                    adaptor.addChild(root_0, listType761.getTree());

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1985:7: structType
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_structType_in_type12044);
                    structType762=structType();

                    state._fsp--;

                    adaptor.addChild(root_0, structType762.getTree());

                    }
                    break;
                case 4 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1986:7: mapType
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_mapType_in_type12052);
                    mapType763=mapType();

                    state._fsp--;

                    adaptor.addChild(root_0, mapType763.getTree());

                    }
                    break;
                case 5 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1987:7: unionType
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_unionType_in_type12060);
                    unionType764=unionType();

                    state._fsp--;

                    adaptor.addChild(root_0, unionType764.getTree());

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "type"


    public static class primitiveType_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "primitiveType"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1989:1: primitiveType : ( KW_TINYINT -> TOK_TINYINT | KW_SMALLINT -> TOK_SMALLINT | KW_INT -> TOK_INT | KW_BIGINT -> TOK_BIGINT | KW_BOOLEAN -> TOK_BOOLEAN | KW_FLOAT -> TOK_FLOAT | KW_DOUBLE -> TOK_DOUBLE | KW_DATE -> TOK_DATE | KW_DATETIME -> TOK_DATETIME | KW_TIMESTAMP -> TOK_TIMESTAMP | KW_STRING -> TOK_STRING | KW_BINARY -> TOK_BINARY | KW_DECIMAL ( LPAREN prec= Number ( COMMA scale= Number )? RPAREN )? -> ^( TOK_DECIMAL ( $prec)? ( $scale)? ) | KW_VARCHAR LPAREN length= Number RPAREN -> ^( TOK_VARCHAR $length) | KW_CHAR LPAREN length= Number RPAREN -> ^( TOK_CHAR $length) );
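    // Illustrative examples: TINYINT -> TOK_TINYINT, DECIMAL(10,2) ->
    // ^(TOK_DECIMAL 10 2), VARCHAR(50) -> ^(TOK_VARCHAR 50), CHAR(5) -> ^(TOK_CHAR 5);
    // per the grammar above, precision and scale are optional for DECIMAL, while a
    // length is mandatory for VARCHAR and CHAR.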
    public final HiveParser.primitiveType_return primitiveType() throws RecognitionException {
        HiveParser.primitiveType_return retval = new HiveParser.primitiveType_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token prec=null;
        Token scale=null;
        Token length=null;
        Token KW_TINYINT765=null;
        Token KW_SMALLINT766=null;
        Token KW_INT767=null;
        Token KW_BIGINT768=null;
        Token KW_BOOLEAN769=null;
        Token KW_FLOAT770=null;
        Token KW_DOUBLE771=null;
        Token KW_DATE772=null;
        Token KW_DATETIME773=null;
        Token KW_TIMESTAMP774=null;
        Token KW_STRING775=null;
        Token KW_BINARY776=null;
        Token KW_DECIMAL777=null;
        Token LPAREN778=null;
        Token COMMA779=null;
        Token RPAREN780=null;
        Token KW_VARCHAR781=null;
        Token LPAREN782=null;
        Token RPAREN783=null;
        Token KW_CHAR784=null;
        Token LPAREN785=null;
        Token RPAREN786=null;

        CommonTree prec_tree=null;
        CommonTree scale_tree=null;
        CommonTree length_tree=null;
        CommonTree KW_TINYINT765_tree=null;
        CommonTree KW_SMALLINT766_tree=null;
        CommonTree KW_INT767_tree=null;
        CommonTree KW_BIGINT768_tree=null;
        CommonTree KW_BOOLEAN769_tree=null;
        CommonTree KW_FLOAT770_tree=null;
        CommonTree KW_DOUBLE771_tree=null;
        CommonTree KW_DATE772_tree=null;
        CommonTree KW_DATETIME773_tree=null;
        CommonTree KW_TIMESTAMP774_tree=null;
        CommonTree KW_STRING775_tree=null;
        CommonTree KW_BINARY776_tree=null;
        CommonTree KW_DECIMAL777_tree=null;
        CommonTree LPAREN778_tree=null;
        CommonTree COMMA779_tree=null;
        CommonTree RPAREN780_tree=null;
        CommonTree KW_VARCHAR781_tree=null;
        CommonTree LPAREN782_tree=null;
        CommonTree RPAREN783_tree=null;
        CommonTree KW_CHAR784_tree=null;
        CommonTree LPAREN785_tree=null;
        CommonTree RPAREN786_tree=null;
        RewriteRuleTokenStream stream_KW_DATETIME=new RewriteRuleTokenStream(adaptor,"token KW_DATETIME");
        RewriteRuleTokenStream stream_KW_VARCHAR=new RewriteRuleTokenStream(adaptor,"token KW_VARCHAR");
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_KW_DATE=new RewriteRuleTokenStream(adaptor,"token KW_DATE");
        RewriteRuleTokenStream stream_KW_TIMESTAMP=new RewriteRuleTokenStream(adaptor,"token KW_TIMESTAMP");
        RewriteRuleTokenStream stream_KW_STRING=new RewriteRuleTokenStream(adaptor,"token KW_STRING");
        RewriteRuleTokenStream stream_Number=new RewriteRuleTokenStream(adaptor,"token Number");
        RewriteRuleTokenStream stream_KW_CHAR=new RewriteRuleTokenStream(adaptor,"token KW_CHAR");
        RewriteRuleTokenStream stream_KW_SMALLINT=new RewriteRuleTokenStream(adaptor,"token KW_SMALLINT");
        RewriteRuleTokenStream stream_KW_INT=new RewriteRuleTokenStream(adaptor,"token KW_INT");
        RewriteRuleTokenStream stream_KW_BINARY=new RewriteRuleTokenStream(adaptor,"token KW_BINARY");
        RewriteRuleTokenStream stream_KW_DECIMAL=new RewriteRuleTokenStream(adaptor,"token KW_DECIMAL");
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleTokenStream stream_KW_BOOLEAN=new RewriteRuleTokenStream(adaptor,"token KW_BOOLEAN");
        RewriteRuleTokenStream stream_KW_FLOAT=new RewriteRuleTokenStream(adaptor,"token KW_FLOAT");
        RewriteRuleTokenStream stream_KW_DOUBLE=new RewriteRuleTokenStream(adaptor,"token KW_DOUBLE");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleTokenStream stream_KW_BIGINT=new RewriteRuleTokenStream(adaptor,"token KW_BIGINT");
        RewriteRuleTokenStream stream_KW_TINYINT=new RewriteRuleTokenStream(adaptor,"token KW_TINYINT");

         pushMsg("primitive type specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:1992:5: ( KW_TINYINT -> TOK_TINYINT | KW_SMALLINT -> TOK_SMALLINT | KW_INT -> TOK_INT | KW_BIGINT -> TOK_BIGINT | KW_BOOLEAN -> TOK_BOOLEAN | KW_FLOAT -> TOK_FLOAT | KW_DOUBLE -> TOK_DOUBLE | KW_DATE -> TOK_DATE | KW_DATETIME -> TOK_DATETIME | KW_TIMESTAMP -> TOK_TIMESTAMP | KW_STRING -> TOK_STRING | KW_BINARY -> TOK_BINARY | KW_DECIMAL ( LPAREN prec= Number ( COMMA scale= Number )? RPAREN )? -> ^( TOK_DECIMAL ( $prec)? ( $scale)? ) | KW_VARCHAR LPAREN length= Number RPAREN -> ^( TOK_VARCHAR $length) | KW_CHAR LPAREN length= Number RPAREN -> ^( TOK_CHAR $length) )
            int alt222=15;
            switch ( input.LA(1) ) {
            case KW_TINYINT:
                {
                alt222=1;
                }
                break;
            case KW_SMALLINT:
                {
                alt222=2;
                }
                break;
            case KW_INT:
                {
                alt222=3;
                }
                break;
            case KW_BIGINT:
                {
                alt222=4;
                }
                break;
            case KW_BOOLEAN:
                {
                alt222=5;
                }
                break;
            case KW_FLOAT:
                {
                alt222=6;
                }
                break;
            case KW_DOUBLE:
                {
                alt222=7;
                }
                break;
            case KW_DATE:
                {
                alt222=8;
                }
                break;
            case KW_DATETIME:
                {
                alt222=9;
                }
                break;
            case KW_TIMESTAMP:
                {
                alt222=10;
                }
                break;
            case KW_STRING:
                {
                alt222=11;
                }
                break;
            case KW_BINARY:
                {
                alt222=12;
                }
                break;
            case KW_DECIMAL:
                {
                alt222=13;
                }
                break;
            case KW_VARCHAR:
                {
                alt222=14;
                }
                break;
            case KW_CHAR:
                {
                alt222=15;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 222, 0, input);

                throw nvae;

            }

            switch (alt222) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1992:7: KW_TINYINT
                    {
                    KW_TINYINT765=(Token)match(input,KW_TINYINT,FOLLOW_KW_TINYINT_in_primitiveType12082);  
                    stream_KW_TINYINT.add(KW_TINYINT765);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1992:24: -> TOK_TINYINT
                    {
                        adaptor.addChild(root_0, 
                        (CommonTree)adaptor.create(TOK_TINYINT, "TOK_TINYINT")
                        );

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1993:7: KW_SMALLINT
                    {
                    KW_SMALLINT766=(Token)match(input,KW_SMALLINT,FOLLOW_KW_SMALLINT_in_primitiveType12103);  
                    stream_KW_SMALLINT.add(KW_SMALLINT766);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1993:24: -> TOK_SMALLINT
                    {
                        adaptor.addChild(root_0, 
                        (CommonTree)adaptor.create(TOK_SMALLINT, "TOK_SMALLINT")
                        );

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1994:7: KW_INT
                    {
                    KW_INT767=(Token)match(input,KW_INT,FOLLOW_KW_INT_in_primitiveType12123);  
                    stream_KW_INT.add(KW_INT767);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1994:24: -> TOK_INT
                    {
                        adaptor.addChild(root_0, 
                        (CommonTree)adaptor.create(TOK_INT, "TOK_INT")
                        );

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 4 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1995:7: KW_BIGINT
                    {
                    KW_BIGINT768=(Token)match(input,KW_BIGINT,FOLLOW_KW_BIGINT_in_primitiveType12148);  
                    stream_KW_BIGINT.add(KW_BIGINT768);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1995:24: -> TOK_BIGINT
                    {
                        adaptor.addChild(root_0, 
                        (CommonTree)adaptor.create(TOK_BIGINT, "TOK_BIGINT")
                        );

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 5 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1996:7: KW_BOOLEAN
                    {
                    KW_BOOLEAN769=(Token)match(input,KW_BOOLEAN,FOLLOW_KW_BOOLEAN_in_primitiveType12170);  
                    stream_KW_BOOLEAN.add(KW_BOOLEAN769);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1996:24: -> TOK_BOOLEAN
                    {
                        adaptor.addChild(root_0, 
                        (CommonTree)adaptor.create(TOK_BOOLEAN, "TOK_BOOLEAN")
                        );

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 6 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1997:7: KW_FLOAT
                    {
                    KW_FLOAT770=(Token)match(input,KW_FLOAT,FOLLOW_KW_FLOAT_in_primitiveType12191);  
                    stream_KW_FLOAT.add(KW_FLOAT770);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1997:24: -> TOK_FLOAT
                    {
                        adaptor.addChild(root_0, 
                        (CommonTree)adaptor.create(TOK_FLOAT, "TOK_FLOAT")
                        );

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 7 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1998:7: KW_DOUBLE
                    {
                    KW_DOUBLE771=(Token)match(input,KW_DOUBLE,FOLLOW_KW_DOUBLE_in_primitiveType12214);  
                    stream_KW_DOUBLE.add(KW_DOUBLE771);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1998:24: -> TOK_DOUBLE
                    {
                        adaptor.addChild(root_0, 
                        (CommonTree)adaptor.create(TOK_DOUBLE, "TOK_DOUBLE")
                        );

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 8 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:1999:7: KW_DATE
                    {
                    KW_DATE772=(Token)match(input,KW_DATE,FOLLOW_KW_DATE_in_primitiveType12236);  
                    stream_KW_DATE.add(KW_DATE772);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 1999:24: -> TOK_DATE
                    {
                        adaptor.addChild(root_0, 
                        (CommonTree)adaptor.create(TOK_DATE, "TOK_DATE")
                        );

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 9 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2000:7: KW_DATETIME
                    {
                    KW_DATETIME773=(Token)match(input,KW_DATETIME,FOLLOW_KW_DATETIME_in_primitiveType12260);  
                    stream_KW_DATETIME.add(KW_DATETIME773);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 2000:24: -> TOK_DATETIME
                    {
                        adaptor.addChild(root_0, 
                        (CommonTree)adaptor.create(TOK_DATETIME, "TOK_DATETIME")
                        );

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 10 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2001:7: KW_TIMESTAMP
                    {
                    KW_TIMESTAMP774=(Token)match(input,KW_TIMESTAMP,FOLLOW_KW_TIMESTAMP_in_primitiveType12280);  
                    stream_KW_TIMESTAMP.add(KW_TIMESTAMP774);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 2001:24: -> TOK_TIMESTAMP
                    {
                        adaptor.addChild(root_0, 
                        (CommonTree)adaptor.create(TOK_TIMESTAMP, "TOK_TIMESTAMP")
                        );

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 11 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2002:7: KW_STRING
                    {
                    KW_STRING775=(Token)match(input,KW_STRING,FOLLOW_KW_STRING_in_primitiveType12299);  
                    stream_KW_STRING.add(KW_STRING775);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 2002:24: -> TOK_STRING
                    {
                        adaptor.addChild(root_0, 
                        (CommonTree)adaptor.create(TOK_STRING, "TOK_STRING")
                        );

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 12 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2003:7: KW_BINARY
                    {
                    KW_BINARY776=(Token)match(input,KW_BINARY,FOLLOW_KW_BINARY_in_primitiveType12321);  
                    stream_KW_BINARY.add(KW_BINARY776);


                    // AST REWRITE
                    // elements: 
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 2003:24: -> TOK_BINARY
                    {
                        adaptor.addChild(root_0, 
                        (CommonTree)adaptor.create(TOK_BINARY, "TOK_BINARY")
                        );

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 13 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2004:7: KW_DECIMAL ( LPAREN prec= Number ( COMMA scale= Number )? RPAREN )?
                    {
                    KW_DECIMAL777=(Token)match(input,KW_DECIMAL,FOLLOW_KW_DECIMAL_in_primitiveType12343);  
                    stream_KW_DECIMAL.add(KW_DECIMAL777);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2004:18: ( LPAREN prec= Number ( COMMA scale= Number )? RPAREN )?
                    int alt221=2;
                    switch ( input.LA(1) ) {
                        case LPAREN:
                            {
                            alt221=1;
                            }
                            break;
                    }

                    switch (alt221) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2004:19: LPAREN prec= Number ( COMMA scale= Number )? RPAREN
                            {
                            LPAREN778=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_primitiveType12346);  
                            stream_LPAREN.add(LPAREN778);


                            prec=(Token)match(input,Number,FOLLOW_Number_in_primitiveType12350);  
                            stream_Number.add(prec);


                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2004:38: ( COMMA scale= Number )?
                            int alt220=2;
                            switch ( input.LA(1) ) {
                                case COMMA:
                                    {
                                    alt220=1;
                                    }
                                    break;
                            }

                            switch (alt220) {
                                case 1 :
                                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2004:39: COMMA scale= Number
                                    {
                                    COMMA779=(Token)match(input,COMMA,FOLLOW_COMMA_in_primitiveType12353);  
                                    stream_COMMA.add(COMMA779);


                                    scale=(Token)match(input,Number,FOLLOW_Number_in_primitiveType12357);  
                                    stream_Number.add(scale);


                                    }
                                    break;

                            }


                            RPAREN780=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_primitiveType12361);  
                            stream_RPAREN.add(RPAREN780);


                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: scale, prec
                    // token labels: scale, prec
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleTokenStream stream_scale=new RewriteRuleTokenStream(adaptor,"token scale",scale);
                    RewriteRuleTokenStream stream_prec=new RewriteRuleTokenStream(adaptor,"token prec",prec);
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 2004:69: -> ^( TOK_DECIMAL ( $prec)? ( $scale)? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2004:72: ^( TOK_DECIMAL ( $prec)? ( $scale)? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_DECIMAL, "TOK_DECIMAL")
                        , root_1);

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2004:87: ( $prec)?
                        if ( stream_prec.hasNext() ) {
                            adaptor.addChild(root_1, stream_prec.nextNode());

                        }
                        stream_prec.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2004:94: ( $scale)?
                        if ( stream_scale.hasNext() ) {
                            adaptor.addChild(root_1, stream_scale.nextNode());

                        }
                        stream_scale.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 14 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2005:7: KW_VARCHAR LPAREN length= Number RPAREN
                    {
                    KW_VARCHAR781=(Token)match(input,KW_VARCHAR,FOLLOW_KW_VARCHAR_in_primitiveType12385);  
                    stream_KW_VARCHAR.add(KW_VARCHAR781);


                    LPAREN782=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_primitiveType12387);  
                    stream_LPAREN.add(LPAREN782);


                    length=(Token)match(input,Number,FOLLOW_Number_in_primitiveType12391);  
                    stream_Number.add(length);


                    RPAREN783=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_primitiveType12393);  
                    stream_RPAREN.add(RPAREN783);


                    // AST REWRITE
                    // elements: length
                    // token labels: length
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleTokenStream stream_length=new RewriteRuleTokenStream(adaptor,"token length",length);
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 2005:51: -> ^( TOK_VARCHAR $length)
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2005:57: ^( TOK_VARCHAR $length)
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_VARCHAR, "TOK_VARCHAR")
                        , root_1);

                        adaptor.addChild(root_1, stream_length.nextNode());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 15 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2006:7: KW_CHAR LPAREN length= Number RPAREN
                    {
                    KW_CHAR784=(Token)match(input,KW_CHAR,FOLLOW_KW_CHAR_in_primitiveType12418);  
                    stream_KW_CHAR.add(KW_CHAR784);


                    LPAREN785=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_primitiveType12420);  
                    stream_LPAREN.add(LPAREN785);


                    length=(Token)match(input,Number,FOLLOW_Number_in_primitiveType12424);  
                    stream_Number.add(length);


                    RPAREN786=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_primitiveType12426);  
                    stream_RPAREN.add(RPAREN786);


                    // AST REWRITE
                    // elements: length
                    // token labels: length
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleTokenStream stream_length=new RewriteRuleTokenStream(adaptor,"token length",length);
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 2006:48: -> ^( TOK_CHAR $length)
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2006:54: ^( TOK_CHAR $length)
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_CHAR, "TOK_CHAR")
                        , root_1);

                        adaptor.addChild(root_1, stream_length.nextNode());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "primitiveType"


    public static class listType_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "listType"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2009:1: listType : KW_ARRAY LESSTHAN type GREATERTHAN -> ^( TOK_LIST type ) ;
    public final HiveParser.listType_return listType() throws RecognitionException {
        HiveParser.listType_return retval = new HiveParser.listType_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_ARRAY787=null;
        Token LESSTHAN788=null;
        Token GREATERTHAN790=null;
        HiveParser.type_return type789 =null;


        CommonTree KW_ARRAY787_tree=null;
        CommonTree LESSTHAN788_tree=null;
        CommonTree GREATERTHAN790_tree=null;
        RewriteRuleTokenStream stream_LESSTHAN=new RewriteRuleTokenStream(adaptor,"token LESSTHAN");
        RewriteRuleTokenStream stream_KW_ARRAY=new RewriteRuleTokenStream(adaptor,"token KW_ARRAY");
        RewriteRuleTokenStream stream_GREATERTHAN=new RewriteRuleTokenStream(adaptor,"token GREATERTHAN");
        RewriteRuleSubtreeStream stream_type=new RewriteRuleSubtreeStream(adaptor,"rule type");
         pushMsg("list type", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2012:5: ( KW_ARRAY LESSTHAN type GREATERTHAN -> ^( TOK_LIST type ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2012:7: KW_ARRAY LESSTHAN type GREATERTHAN
            {
            KW_ARRAY787=(Token)match(input,KW_ARRAY,FOLLOW_KW_ARRAY_in_listType12470);  
            stream_KW_ARRAY.add(KW_ARRAY787);


            LESSTHAN788=(Token)match(input,LESSTHAN,FOLLOW_LESSTHAN_in_listType12472);  
            stream_LESSTHAN.add(LESSTHAN788);


            pushFollow(FOLLOW_type_in_listType12474);
            type789=type();

            state._fsp--;

            stream_type.add(type789.getTree());

            GREATERTHAN790=(Token)match(input,GREATERTHAN,FOLLOW_GREATERTHAN_in_listType12476);  
            stream_GREATERTHAN.add(GREATERTHAN790);


            // AST REWRITE
            // elements: type
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2012:44: -> ^( TOK_LIST type )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2012:47: ^( TOK_LIST type )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_LIST, "TOK_LIST")
                , root_1);

                adaptor.addChild(root_1, stream_type.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "listType"


    public static class structType_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "structType"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2015:1: structType : KW_STRUCT LESSTHAN columnNameColonTypeList GREATERTHAN -> ^( TOK_STRUCT columnNameColonTypeList ) ;
    public final HiveParser.structType_return structType() throws RecognitionException {
        HiveParser.structType_return retval = new HiveParser.structType_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_STRUCT791=null;
        Token LESSTHAN792=null;
        Token GREATERTHAN794=null;
        HiveParser.columnNameColonTypeList_return columnNameColonTypeList793 =null;


        CommonTree KW_STRUCT791_tree=null;
        CommonTree LESSTHAN792_tree=null;
        CommonTree GREATERTHAN794_tree=null;
        RewriteRuleTokenStream stream_LESSTHAN=new RewriteRuleTokenStream(adaptor,"token LESSTHAN");
        RewriteRuleTokenStream stream_KW_STRUCT=new RewriteRuleTokenStream(adaptor,"token KW_STRUCT");
        RewriteRuleTokenStream stream_GREATERTHAN=new RewriteRuleTokenStream(adaptor,"token GREATERTHAN");
        RewriteRuleSubtreeStream stream_columnNameColonTypeList=new RewriteRuleSubtreeStream(adaptor,"rule columnNameColonTypeList");
         pushMsg("struct type", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2018:5: ( KW_STRUCT LESSTHAN columnNameColonTypeList GREATERTHAN -> ^( TOK_STRUCT columnNameColonTypeList ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2018:7: KW_STRUCT LESSTHAN columnNameColonTypeList GREATERTHAN
            {
            KW_STRUCT791=(Token)match(input,KW_STRUCT,FOLLOW_KW_STRUCT_in_structType12513);  
            stream_KW_STRUCT.add(KW_STRUCT791);


            LESSTHAN792=(Token)match(input,LESSTHAN,FOLLOW_LESSTHAN_in_structType12515);  
            stream_LESSTHAN.add(LESSTHAN792);


            pushFollow(FOLLOW_columnNameColonTypeList_in_structType12517);
            columnNameColonTypeList793=columnNameColonTypeList();

            state._fsp--;

            stream_columnNameColonTypeList.add(columnNameColonTypeList793.getTree());

            GREATERTHAN794=(Token)match(input,GREATERTHAN,FOLLOW_GREATERTHAN_in_structType12519);  
            stream_GREATERTHAN.add(GREATERTHAN794);


            // AST REWRITE
            // elements: columnNameColonTypeList
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2018:62: -> ^( TOK_STRUCT columnNameColonTypeList )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2018:65: ^( TOK_STRUCT columnNameColonTypeList )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_STRUCT, "TOK_STRUCT")
                , root_1);

                adaptor.addChild(root_1, stream_columnNameColonTypeList.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "structType"


    public static class mapType_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "mapType"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2021:1: mapType : KW_MAP LESSTHAN left= primitiveType COMMA right= type GREATERTHAN -> ^( TOK_MAP $left $right) ;
    public final HiveParser.mapType_return mapType() throws RecognitionException {
        HiveParser.mapType_return retval = new HiveParser.mapType_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_MAP795=null;
        Token LESSTHAN796=null;
        Token COMMA797=null;
        Token GREATERTHAN798=null;
        HiveParser.primitiveType_return left =null;

        HiveParser.type_return right =null;


        CommonTree KW_MAP795_tree=null;
        CommonTree LESSTHAN796_tree=null;
        CommonTree COMMA797_tree=null;
        CommonTree GREATERTHAN798_tree=null;
        RewriteRuleTokenStream stream_LESSTHAN=new RewriteRuleTokenStream(adaptor,"token LESSTHAN");
        RewriteRuleTokenStream stream_KW_MAP=new RewriteRuleTokenStream(adaptor,"token KW_MAP");
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleTokenStream stream_GREATERTHAN=new RewriteRuleTokenStream(adaptor,"token GREATERTHAN");
        RewriteRuleSubtreeStream stream_primitiveType=new RewriteRuleSubtreeStream(adaptor,"rule primitiveType");
        RewriteRuleSubtreeStream stream_type=new RewriteRuleSubtreeStream(adaptor,"rule type");
         pushMsg("map type", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2024:5: ( KW_MAP LESSTHAN left= primitiveType COMMA right= type GREATERTHAN -> ^( TOK_MAP $left $right) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2024:7: KW_MAP LESSTHAN left= primitiveType COMMA right= type GREATERTHAN
            {
            KW_MAP795=(Token)match(input,KW_MAP,FOLLOW_KW_MAP_in_mapType12554);  
            stream_KW_MAP.add(KW_MAP795);


            LESSTHAN796=(Token)match(input,LESSTHAN,FOLLOW_LESSTHAN_in_mapType12556);  
            stream_LESSTHAN.add(LESSTHAN796);


            pushFollow(FOLLOW_primitiveType_in_mapType12560);
            left=primitiveType();

            state._fsp--;

            stream_primitiveType.add(left.getTree());

            COMMA797=(Token)match(input,COMMA,FOLLOW_COMMA_in_mapType12562);  
            stream_COMMA.add(COMMA797);


            pushFollow(FOLLOW_type_in_mapType12566);
            right=type();

            state._fsp--;

            stream_type.add(right.getTree());

            GREATERTHAN798=(Token)match(input,GREATERTHAN,FOLLOW_GREATERTHAN_in_mapType12568);  
            stream_GREATERTHAN.add(GREATERTHAN798);


            // AST REWRITE
            // elements: left, right
            // token labels: 
            // rule labels: retval, left, right
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            RewriteRuleSubtreeStream stream_left=new RewriteRuleSubtreeStream(adaptor,"rule left",left!=null?left.tree:null);
            RewriteRuleSubtreeStream stream_right=new RewriteRuleSubtreeStream(adaptor,"rule right",right!=null?right.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2025:5: -> ^( TOK_MAP $left $right)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2025:8: ^( TOK_MAP $left $right)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_MAP, "TOK_MAP")
                , root_1);

                adaptor.addChild(root_1, stream_left.nextTree());

                adaptor.addChild(root_1, stream_right.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "mapType"


    public static class unionType_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "unionType"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2028:1: unionType : KW_UNIONTYPE LESSTHAN colTypeList GREATERTHAN -> ^( TOK_UNIONTYPE colTypeList ) ;
    public final HiveParser.unionType_return unionType() throws RecognitionException {
        HiveParser.unionType_return retval = new HiveParser.unionType_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_UNIONTYPE799=null;
        Token LESSTHAN800=null;
        Token GREATERTHAN802=null;
        HiveParser.colTypeList_return colTypeList801 =null;


        CommonTree KW_UNIONTYPE799_tree=null;
        CommonTree LESSTHAN800_tree=null;
        CommonTree GREATERTHAN802_tree=null;
        RewriteRuleTokenStream stream_LESSTHAN=new RewriteRuleTokenStream(adaptor,"token LESSTHAN");
        RewriteRuleTokenStream stream_KW_UNIONTYPE=new RewriteRuleTokenStream(adaptor,"token KW_UNIONTYPE");
        RewriteRuleTokenStream stream_GREATERTHAN=new RewriteRuleTokenStream(adaptor,"token GREATERTHAN");
        RewriteRuleSubtreeStream stream_colTypeList=new RewriteRuleSubtreeStream(adaptor,"rule colTypeList");
         pushMsg("uniontype type", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2031:5: ( KW_UNIONTYPE LESSTHAN colTypeList GREATERTHAN -> ^( TOK_UNIONTYPE colTypeList ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2031:7: KW_UNIONTYPE LESSTHAN colTypeList GREATERTHAN
            {
            KW_UNIONTYPE799=(Token)match(input,KW_UNIONTYPE,FOLLOW_KW_UNIONTYPE_in_unionType12611);  
            stream_KW_UNIONTYPE.add(KW_UNIONTYPE799);


            LESSTHAN800=(Token)match(input,LESSTHAN,FOLLOW_LESSTHAN_in_unionType12613);  
            stream_LESSTHAN.add(LESSTHAN800);


            pushFollow(FOLLOW_colTypeList_in_unionType12615);
            colTypeList801=colTypeList();

            state._fsp--;

            stream_colTypeList.add(colTypeList801.getTree());

            GREATERTHAN802=(Token)match(input,GREATERTHAN,FOLLOW_GREATERTHAN_in_unionType12617);  
            stream_GREATERTHAN.add(GREATERTHAN802);


            // AST REWRITE
            // elements: colTypeList
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2031:53: -> ^( TOK_UNIONTYPE colTypeList )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2031:56: ^( TOK_UNIONTYPE colTypeList )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_UNIONTYPE, "TOK_UNIONTYPE")
                , root_1);

                adaptor.addChild(root_1, stream_colTypeList.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "unionType"


    public static class setOperator_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "setOperator"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2034:1: setOperator : KW_UNION KW_ALL -> ^( TOK_UNION ) ;
    public final HiveParser.setOperator_return setOperator() throws RecognitionException {
        HiveParser.setOperator_return retval = new HiveParser.setOperator_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_UNION803=null;
        Token KW_ALL804=null;

        CommonTree KW_UNION803_tree=null;
        CommonTree KW_ALL804_tree=null;
        RewriteRuleTokenStream stream_KW_ALL=new RewriteRuleTokenStream(adaptor,"token KW_ALL");
        RewriteRuleTokenStream stream_KW_UNION=new RewriteRuleTokenStream(adaptor,"token KW_UNION");

         pushMsg("set operator", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2037:5: ( KW_UNION KW_ALL -> ^( TOK_UNION ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2037:7: KW_UNION KW_ALL
            {
            KW_UNION803=(Token)match(input,KW_UNION,FOLLOW_KW_UNION_in_setOperator12652);  
            stream_KW_UNION.add(KW_UNION803);


            KW_ALL804=(Token)match(input,KW_ALL,FOLLOW_KW_ALL_in_setOperator12654);  
            stream_KW_ALL.add(KW_ALL804);


            // AST REWRITE
            // elements: 
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2037:23: -> ^( TOK_UNION )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2037:26: ^( TOK_UNION )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_UNION, "TOK_UNION")
                , root_1);

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "setOperator"


    public static class queryStatementExpression_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "queryStatementExpression"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2040:1: queryStatementExpression[boolean topLevel] : (w= withClause {...}?)? queryStatementExpressionBody[topLevel] -> queryStatementExpressionBody ;
    public final HiveParser.queryStatementExpression_return queryStatementExpression(boolean topLevel) throws RecognitionException {
        HiveParser.queryStatementExpression_return retval = new HiveParser.queryStatementExpression_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.withClause_return w =null;

        HiveParser.queryStatementExpressionBody_return queryStatementExpressionBody805 =null;


        RewriteRuleSubtreeStream stream_queryStatementExpressionBody=new RewriteRuleSubtreeStream(adaptor,"rule queryStatementExpressionBody");
        RewriteRuleSubtreeStream stream_withClause=new RewriteRuleSubtreeStream(adaptor,"rule withClause");
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2041:5: ( (w= withClause {...}?)? queryStatementExpressionBody[topLevel] -> queryStatementExpressionBody )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2046:5: (w= withClause {...}?)? queryStatementExpressionBody[topLevel]
            {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2046:5: (w= withClause {...}?)?
            int alt223=2;
            switch ( input.LA(1) ) {
                case KW_WITH:
                    {
                    alt223=1;
                    }
                    break;
            }

            switch (alt223) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2046:6: w= withClause {...}?
                    {
                    pushFollow(FOLLOW_withClause_in_queryStatementExpression12691);
                    w=withClause();

                    state._fsp--;

                    stream_withClause.add(w.getTree());

                    if ( !((topLevel)) ) {
                        throw new FailedPredicateException(input, "queryStatementExpression", "topLevel");
                    }

                    }
                    break;

            }


            pushFollow(FOLLOW_queryStatementExpressionBody_in_queryStatementExpression12701);
            queryStatementExpressionBody805=queryStatementExpressionBody(topLevel);

            state._fsp--;

            stream_queryStatementExpressionBody.add(queryStatementExpressionBody805.getTree());


                  if ((w!=null?((CommonTree)w.tree):null) != null) {
                  adaptor.addChild((queryStatementExpressionBody805!=null?((CommonTree)queryStatementExpressionBody805.tree):null), (w!=null?((CommonTree)w.tree):null));
                  }
                

            // AST REWRITE
            // elements: queryStatementExpressionBody
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2052:5: -> queryStatementExpressionBody
            {
                adaptor.addChild(root_0, stream_queryStatementExpressionBody.nextTree());

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "queryStatementExpression"


    public static class queryStatementExpressionBody_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "queryStatementExpressionBody"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2055:1: queryStatementExpressionBody[boolean topLevel] : ( fromStatement[topLevel] | regularBody[topLevel] );
    public final HiveParser.queryStatementExpressionBody_return queryStatementExpressionBody(boolean topLevel) throws RecognitionException {
        HiveParser.queryStatementExpressionBody_return retval = new HiveParser.queryStatementExpressionBody_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.fromStatement_return fromStatement806 =null;

        HiveParser.regularBody_return regularBody807 =null;



        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2056:5: ( fromStatement[topLevel] | regularBody[topLevel] )
            int alt224=2;
            switch ( input.LA(1) ) {
            case KW_FROM:
                {
                alt224=1;
                }
                break;
            case KW_INSERT:
            case KW_MAP:
            case KW_REDUCE:
            case KW_SELECT:
                {
                alt224=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 224, 0, input);

                throw nvae;

            }

            switch (alt224) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2057:5: fromStatement[topLevel]
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_fromStatement_in_queryStatementExpressionBody12735);
                    fromStatement806=fromStatement(topLevel);

                    state._fsp--;

                    adaptor.addChild(root_0, fromStatement806.getTree());

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2058:7: regularBody[topLevel]
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_regularBody_in_queryStatementExpressionBody12744);
                    regularBody807=regularBody(topLevel);

                    state._fsp--;

                    adaptor.addChild(root_0, regularBody807.getTree());

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "queryStatementExpressionBody"


    public static class withClause_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "withClause"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2061:1: withClause : KW_WITH cteStatement ( COMMA cteStatement )* -> ^( TOK_CTE ( cteStatement )+ ) ;
    public final HiveParser.withClause_return withClause() throws RecognitionException {
        HiveParser.withClause_return retval = new HiveParser.withClause_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_WITH808=null;
        Token COMMA810=null;
        HiveParser.cteStatement_return cteStatement809 =null;

        HiveParser.cteStatement_return cteStatement811 =null;


        CommonTree KW_WITH808_tree=null;
        CommonTree COMMA810_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleTokenStream stream_KW_WITH=new RewriteRuleTokenStream(adaptor,"token KW_WITH");
        RewriteRuleSubtreeStream stream_cteStatement=new RewriteRuleSubtreeStream(adaptor,"rule cteStatement");
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2062:3: ( KW_WITH cteStatement ( COMMA cteStatement )* -> ^( TOK_CTE ( cteStatement )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2063:3: KW_WITH cteStatement ( COMMA cteStatement )*
            {
            KW_WITH808=(Token)match(input,KW_WITH,FOLLOW_KW_WITH_in_withClause12762);  
            stream_KW_WITH.add(KW_WITH808);


            pushFollow(FOLLOW_cteStatement_in_withClause12764);
            cteStatement809=cteStatement();

            state._fsp--;

            stream_cteStatement.add(cteStatement809.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2063:24: ( COMMA cteStatement )*
            loop225:
            do {
                int alt225=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt225=1;
                    }
                    break;

                }

                switch (alt225) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2063:25: COMMA cteStatement
            	    {
            	    COMMA810=(Token)match(input,COMMA,FOLLOW_COMMA_in_withClause12767);  
            	    stream_COMMA.add(COMMA810);


            	    pushFollow(FOLLOW_cteStatement_in_withClause12769);
            	    cteStatement811=cteStatement();

            	    state._fsp--;

            	    stream_cteStatement.add(cteStatement811.getTree());

            	    }
            	    break;

            	default :
            	    break loop225;
                }
            } while (true);


            // AST REWRITE
            // elements: cteStatement
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2063:46: -> ^( TOK_CTE ( cteStatement )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2063:49: ^( TOK_CTE ( cteStatement )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_CTE, "TOK_CTE")
                , root_1);

                if ( !(stream_cteStatement.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_cteStatement.hasNext() ) {
                    adaptor.addChild(root_1, stream_cteStatement.nextTree());

                }
                stream_cteStatement.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "withClause"


    public static class cteStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "cteStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2066:1: cteStatement : identifier KW_AS LPAREN queryStatementExpression[false] RPAREN -> ^( TOK_SUBQUERY queryStatementExpression identifier ) ;
    public final HiveParser.cteStatement_return cteStatement() throws RecognitionException {
        HiveParser.cteStatement_return retval = new HiveParser.cteStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_AS813=null;
        Token LPAREN814=null;
        Token RPAREN816=null;
        HiveParser_IdentifiersParser.identifier_return identifier812 =null;

        HiveParser.queryStatementExpression_return queryStatementExpression815 =null;


        CommonTree KW_AS813_tree=null;
        CommonTree LPAREN814_tree=null;
        CommonTree RPAREN816_tree=null;
        RewriteRuleTokenStream stream_KW_AS=new RewriteRuleTokenStream(adaptor,"token KW_AS");
        RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN");
        RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN");
        RewriteRuleSubtreeStream stream_identifier=new RewriteRuleSubtreeStream(adaptor,"rule identifier");
        RewriteRuleSubtreeStream stream_queryStatementExpression=new RewriteRuleSubtreeStream(adaptor,"rule queryStatementExpression");
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2067:4: ( identifier KW_AS LPAREN queryStatementExpression[false] RPAREN -> ^( TOK_SUBQUERY queryStatementExpression identifier ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2068:4: identifier KW_AS LPAREN queryStatementExpression[false] RPAREN
            {
            pushFollow(FOLLOW_identifier_in_cteStatement12795);
            identifier812=identifier();

            state._fsp--;

            stream_identifier.add(identifier812.getTree());

            KW_AS813=(Token)match(input,KW_AS,FOLLOW_KW_AS_in_cteStatement12797);  
            stream_KW_AS.add(KW_AS813);


            LPAREN814=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_cteStatement12799);  
            stream_LPAREN.add(LPAREN814);


            pushFollow(FOLLOW_queryStatementExpression_in_cteStatement12801);
            queryStatementExpression815=queryStatementExpression(false);

            state._fsp--;

            stream_queryStatementExpression.add(queryStatementExpression815.getTree());

            RPAREN816=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_cteStatement12804);  
            stream_RPAREN.add(RPAREN816);


            // AST REWRITE
            // elements: identifier, queryStatementExpression
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2069:4: -> ^( TOK_SUBQUERY queryStatementExpression identifier )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2069:7: ^( TOK_SUBQUERY queryStatementExpression identifier )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SUBQUERY, "TOK_SUBQUERY")
                , root_1);

                adaptor.addChild(root_1, stream_queryStatementExpression.nextTree());

                adaptor.addChild(root_1, stream_identifier.nextTree());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "cteStatement"


    public static class fromStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "fromStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2072:1: fromStatement[boolean topLevel] : ( singleFromStatement -> singleFromStatement ) (u= setOperator r= singleFromStatement -> ^( $u $r) )* -> {u != null && topLevel}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) ) ) ->;
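    /*
     * Illustrative note (not generated by ANTLR): fromStatement parses one singleFromStatement,
     * optionally followed by (setOperator singleFromStatement)*; each iteration nests the tree
     * built so far as the first child of the set-operator node, i.e. -> ^( $u <previous> $r ),
     * so UNION chains are left-nested. When at least one set operator was seen and topLevel is
     * true, the final rewrite wraps the whole chain in a synthetic top-level query:
     *
     *   ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY <union-tree> <generated alias> ) )
     *                ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) )
     *                              ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) ) )
     *
     * using generateUnionAlias() for the subquery alias; otherwise the accumulated tree is passed
     * through unchanged. The angle-bracket placeholders are illustrative only.
     */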
    public final HiveParser.fromStatement_return fromStatement(boolean topLevel) throws RecognitionException {
        HiveParser.fromStatement_return retval = new HiveParser.fromStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.setOperator_return u =null;

        HiveParser.singleFromStatement_return r =null;

        HiveParser.singleFromStatement_return singleFromStatement817 =null;


        RewriteRuleSubtreeStream stream_singleFromStatement=new RewriteRuleSubtreeStream(adaptor,"rule singleFromStatement");
        RewriteRuleSubtreeStream stream_setOperator=new RewriteRuleSubtreeStream(adaptor,"rule setOperator");
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2073:3: ( ( singleFromStatement -> singleFromStatement ) (u= setOperator r= singleFromStatement -> ^( $u $r) )* -> {u != null && topLevel}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) ) ) ->)
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2073:3: ( singleFromStatement -> singleFromStatement ) (u= setOperator r= singleFromStatement -> ^( $u $r) )*
            {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2073:3: ( singleFromStatement -> singleFromStatement )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2073:4: singleFromStatement
            {
            pushFollow(FOLLOW_singleFromStatement_in_fromStatement12828);
            singleFromStatement817=singleFromStatement();

            state._fsp--;

            stream_singleFromStatement.add(singleFromStatement817.getTree());

            // AST REWRITE
            // elements: singleFromStatement
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2073:25: -> singleFromStatement
            {
                adaptor.addChild(root_0, stream_singleFromStatement.nextTree());

            }


            retval.tree = root_0;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2074:2: (u= setOperator r= singleFromStatement -> ^( $u $r) )*
            loop226:
            do {
                int alt226=2;
                switch ( input.LA(1) ) {
                case KW_UNION:
                    {
                    alt226=1;
                    }
                    break;

                }

                switch (alt226) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2074:3: u= setOperator r= singleFromStatement
            	    {
            	    pushFollow(FOLLOW_setOperator_in_fromStatement12840);
            	    u=setOperator();

            	    state._fsp--;

            	    stream_setOperator.add(u.getTree());

            	    pushFollow(FOLLOW_singleFromStatement_in_fromStatement12844);
            	    r=singleFromStatement();

            	    state._fsp--;

            	    stream_singleFromStatement.add(r.getTree());

            	    // AST REWRITE
            	    // elements: r, u
            	    // token labels: 
            	    // rule labels: retval, u, r
            	    // token list labels: 
            	    // rule list labels: 
            	    // wildcard labels: 
            	    retval.tree = root_0;
            	    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            	    RewriteRuleSubtreeStream stream_u=new RewriteRuleSubtreeStream(adaptor,"rule u",u!=null?u.tree:null);
            	    RewriteRuleSubtreeStream stream_r=new RewriteRuleSubtreeStream(adaptor,"rule r",r!=null?r.tree:null);

            	    root_0 = (CommonTree)adaptor.nil();
            	    // 2075:4: -> ^( $u $r)
            	    {
            	        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2075:7: ^( $u $r)
            	        {
            	        CommonTree root_1 = (CommonTree)adaptor.nil();
            	        root_1 = (CommonTree)adaptor.becomeRoot(stream_u.nextNode(), root_1);

            	        adaptor.addChild(root_1, ((CommonTree)retval.tree));

            	        adaptor.addChild(root_1, stream_r.nextTree());

            	        adaptor.addChild(root_0, root_1);
            	        }

            	    }


            	    retval.tree = root_0;

            	    }
            	    break;

            	default :
            	    break loop226;
                }
            } while (true);


            // AST REWRITE
            // elements: 
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2077:3: -> {u != null && topLevel}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) ) )
            if (u != null && topLevel) {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2077:31: ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) ) )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_QUERY, "TOK_QUERY")
                , root_1);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2078:9: ^( TOK_FROM ^( TOK_SUBQUERY ) )
                {
                CommonTree root_2 = (CommonTree)adaptor.nil();
                root_2 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_FROM, "TOK_FROM")
                , root_2);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2079:11: ^( TOK_SUBQUERY )
                {
                CommonTree root_3 = (CommonTree)adaptor.nil();
                root_3 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SUBQUERY, "TOK_SUBQUERY")
                , root_3);

                adaptor.addChild(root_3, ((CommonTree)retval.tree));

                adaptor.addChild(root_3, adaptor.create(Identifier, generateUnionAlias()));

                adaptor.addChild(root_2, root_3);
                }

                adaptor.addChild(root_1, root_2);
                }

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2084:9: ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) )
                {
                CommonTree root_2 = (CommonTree)adaptor.nil();
                root_2 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_INSERT, "TOK_INSERT")
                , root_2);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2085:12: ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) )
                {
                CommonTree root_3 = (CommonTree)adaptor.nil();
                root_3 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DESTINATION, "TOK_DESTINATION")
                , root_3);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2085:30: ^( TOK_DIR TOK_TMP_FILE )
                {
                CommonTree root_4 = (CommonTree)adaptor.nil();
                root_4 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DIR, "TOK_DIR")
                , root_4);

                adaptor.addChild(root_4, 
                (CommonTree)adaptor.create(TOK_TMP_FILE, "TOK_TMP_FILE")
                );

                adaptor.addChild(root_3, root_4);
                }

                adaptor.addChild(root_2, root_3);
                }

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2086:12: ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) )
                {
                CommonTree root_3 = (CommonTree)adaptor.nil();
                root_3 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SELECT, "TOK_SELECT")
                , root_3);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2086:25: ^( TOK_SELEXPR TOK_ALLCOLREF )
                {
                CommonTree root_4 = (CommonTree)adaptor.nil();
                root_4 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SELEXPR, "TOK_SELEXPR")
                , root_4);

                adaptor.addChild(root_4, 
                (CommonTree)adaptor.create(TOK_ALLCOLREF, "TOK_ALLCOLREF")
                );

                adaptor.addChild(root_3, root_4);
                }

                adaptor.addChild(root_2, root_3);
                }

                adaptor.addChild(root_1, root_2);
                }

                adaptor.addChild(root_0, root_1);
                }

            }

            else // 2089:5: ->
            {
                adaptor.addChild(root_0, ((CommonTree)retval.tree));

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "fromStatement"


    public static class singleFromStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "singleFromStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2093:1: singleFromStatement : fromClause (b+= body )+ -> ^( TOK_QUERY fromClause ( body )+ ) ;
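    /*
     * Illustrative note (not generated by ANTLR): singleFromStatement matches a fromClause
     * followed by one or more body productions (starting with INSERT, MAP, REDUCE or SELECT)
     * and rewrites them to ^( TOK_QUERY fromClause ( body )+ ). A multi-insert query such as
     *
     *   FROM src INSERT ... SELECT ... INSERT ... SELECT ...
     *
     * therefore yields a single TOK_QUERY node whose first child is the FROM subtree and whose
     * remaining children are one subtree per body; the SQL text is illustrative only.
     */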
    public final HiveParser.singleFromStatement_return singleFromStatement() throws RecognitionException {
        HiveParser.singleFromStatement_return retval = new HiveParser.singleFromStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        List list_b=null;
        HiveParser_FromClauseParser.fromClause_return fromClause818 =null;

        RuleReturnScope b = null;
        RewriteRuleSubtreeStream stream_body=new RewriteRuleSubtreeStream(adaptor,"rule body");
        RewriteRuleSubtreeStream stream_fromClause=new RewriteRuleSubtreeStream(adaptor,"rule fromClause");
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2094:5: ( fromClause (b+= body )+ -> ^( TOK_QUERY fromClause ( body )+ ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2095:5: fromClause (b+= body )+
            {
            pushFollow(FOLLOW_fromClause_in_singleFromStatement13051);
            fromClause818=fromClause();

            state._fsp--;

            stream_fromClause.add(fromClause818.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2096:5: (b+= body )+
            int cnt227=0;
            loop227:
            do {
                int alt227=2;
                switch ( input.LA(1) ) {
                case KW_INSERT:
                case KW_MAP:
                case KW_REDUCE:
                case KW_SELECT:
                    {
                    alt227=1;
                    }
                    break;

                }

                switch (alt227) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2096:7: b+= body
            	    {
            	    pushFollow(FOLLOW_body_in_singleFromStatement13061);
            	    b=body();

            	    state._fsp--;

            	    stream_body.add(b.getTree());
            	    if (list_b==null) list_b=new ArrayList();
            	    list_b.add(b.getTree());


            	    }
            	    break;

            	default :
            	    if ( cnt227 >= 1 ) break loop227;
                        EarlyExitException eee =
                            new EarlyExitException(227, input);
                        throw eee;
                }
                cnt227++;
            } while (true);


            // AST REWRITE
            // elements: fromClause, body
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2096:18: -> ^( TOK_QUERY fromClause ( body )+ )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2096:21: ^( TOK_QUERY fromClause ( body )+ )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_QUERY, "TOK_QUERY")
                , root_1);

                adaptor.addChild(root_1, stream_fromClause.nextTree());

                if ( !(stream_body.hasNext()) ) {
                    throw new RewriteEarlyExitException();
                }
                while ( stream_body.hasNext() ) {
                    adaptor.addChild(root_1, stream_body.nextTree());

                }
                stream_body.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "singleFromStatement"


    public static class regularBody_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "regularBody"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2106:1: regularBody[boolean topLevel] : (i= insertClause (s= selectStatement[topLevel] ->| valuesClause -> ^( TOK_QUERY ^( TOK_FROM ^( TOK_VIRTUAL_TABLE ^( TOK_VIRTUAL_TABREF ^( TOK_ANONYMOUS ) ) valuesClause ) ) ^( TOK_INSERT ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) ) ) ) | selectStatement[topLevel] );
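    /*
     * Illustrative note (not generated by ANTLR): regularBody handles two shapes. With a leading
     * insertClause it either (a) parses a selectStatement and splices the insert target into that
     * tree by calling getChild(1).replaceChildren(0, 0, i.tree) on the select result, or
     * (b) parses a valuesClause ("INSERT INTO ... VALUES ...") and rewrites it to
     *
     *   ^( TOK_QUERY ^( TOK_FROM ^( TOK_VIRTUAL_TABLE ^( TOK_VIRTUAL_TABREF ^( TOK_ANONYMOUS ) )
     *                                                 valuesClause ) )
     *                ^( TOK_INSERT <insertClause tree> ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) ) )
     *
     * Without an insertClause it simply delegates to selectStatement[topLevel]. The placeholder in
     * angle brackets is illustrative only.
     */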
    public final HiveParser.regularBody_return regularBody(boolean topLevel) throws RecognitionException {
        HiveParser.regularBody_return retval = new HiveParser.regularBody_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.insertClause_return i =null;

        HiveParser.selectStatement_return s =null;

        HiveParser_FromClauseParser.valuesClause_return valuesClause819 =null;

        HiveParser.selectStatement_return selectStatement820 =null;


        RewriteRuleSubtreeStream stream_selectStatement=new RewriteRuleSubtreeStream(adaptor,"rule selectStatement");
        RewriteRuleSubtreeStream stream_valuesClause=new RewriteRuleSubtreeStream(adaptor,"rule valuesClause");
        RewriteRuleSubtreeStream stream_insertClause=new RewriteRuleSubtreeStream(adaptor,"rule insertClause");
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2107:4: (i= insertClause (s= selectStatement[topLevel] ->| valuesClause -> ^( TOK_QUERY ^( TOK_FROM ^( TOK_VIRTUAL_TABLE ^( TOK_VIRTUAL_TABREF ^( TOK_ANONYMOUS ) ) valuesClause ) ) ^( TOK_INSERT ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) ) ) ) | selectStatement[topLevel] )
            int alt229=2;
            switch ( input.LA(1) ) {
            case KW_INSERT:
                {
                alt229=1;
                }
                break;
            case KW_MAP:
            case KW_REDUCE:
            case KW_SELECT:
                {
                alt229=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 229, 0, input);

                throw nvae;

            }

            switch (alt229) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2108:4: i= insertClause (s= selectStatement[topLevel] ->| valuesClause -> ^( TOK_QUERY ^( TOK_FROM ^( TOK_VIRTUAL_TABLE ^( TOK_VIRTUAL_TABREF ^( TOK_ANONYMOUS ) ) valuesClause ) ) ^( TOK_INSERT ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) ) ) )
                    {
                    pushFollow(FOLLOW_insertClause_in_regularBody13099);
                    i=insertClause();

                    state._fsp--;

                    stream_insertClause.add(i.getTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2109:4: (s= selectStatement[topLevel] ->| valuesClause -> ^( TOK_QUERY ^( TOK_FROM ^( TOK_VIRTUAL_TABLE ^( TOK_VIRTUAL_TABREF ^( TOK_ANONYMOUS ) ) valuesClause ) ) ^( TOK_INSERT ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) ) ) )
                    int alt228=2;
                    switch ( input.LA(1) ) {
                    case KW_MAP:
                    case KW_REDUCE:
                    case KW_SELECT:
                        {
                        alt228=1;
                        }
                        break;
                    case KW_VALUES:
                        {
                        alt228=2;
                        }
                        break;
                    default:
                        NoViableAltException nvae =
                            new NoViableAltException("", 228, 0, input);

                        throw nvae;

                    }

                    switch (alt228) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2110:4: s= selectStatement[topLevel]
                            {
                            pushFollow(FOLLOW_selectStatement_in_regularBody13111);
                            s=selectStatement(topLevel);

                            state._fsp--;

                            stream_selectStatement.add(s.getTree());

                            (s!=null?((CommonTree)s.tree):null).getChild(1).replaceChildren(0, 0, (i!=null?((CommonTree)i.tree):null));

                            // AST REWRITE
                            // elements: 
                            // token labels: 
                            // rule labels: retval
                            // token list labels: 
                            // rule list labels: 
                            // wildcard labels: 
                            retval.tree = root_0;
                            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                            root_0 = (CommonTree)adaptor.nil();
                            // 2111:60: ->
                            {
                                adaptor.addChild(root_0, (s!=null?((CommonTree)s.tree):null));

                            }


                            retval.tree = root_0;

                            }
                            break;
                        case 2 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2113:6: valuesClause
                            {
                            pushFollow(FOLLOW_valuesClause_in_regularBody13137);
                            valuesClause819=valuesClause();

                            state._fsp--;

                            stream_valuesClause.add(valuesClause819.getTree());

                            // AST REWRITE
                            // elements: valuesClause
                            // token labels: 
                            // rule labels: retval
                            // token list labels: 
                            // rule list labels: 
                            // wildcard labels: 
                            retval.tree = root_0;
                            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                            root_0 = (CommonTree)adaptor.nil();
                            // 2114:7: -> ^( TOK_QUERY ^( TOK_FROM ^( TOK_VIRTUAL_TABLE ^( TOK_VIRTUAL_TABREF ^( TOK_ANONYMOUS ) ) valuesClause ) ) ^( TOK_INSERT ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) ) )
                            {
                                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2114:10: ^( TOK_QUERY ^( TOK_FROM ^( TOK_VIRTUAL_TABLE ^( TOK_VIRTUAL_TABREF ^( TOK_ANONYMOUS ) ) valuesClause ) ) ^( TOK_INSERT ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) ) )
                                {
                                CommonTree root_1 = (CommonTree)adaptor.nil();
                                root_1 = (CommonTree)adaptor.becomeRoot(
                                (CommonTree)adaptor.create(TOK_QUERY, "TOK_QUERY")
                                , root_1);

                                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2115:13: ^( TOK_FROM ^( TOK_VIRTUAL_TABLE ^( TOK_VIRTUAL_TABREF ^( TOK_ANONYMOUS ) ) valuesClause ) )
                                {
                                CommonTree root_2 = (CommonTree)adaptor.nil();
                                root_2 = (CommonTree)adaptor.becomeRoot(
                                (CommonTree)adaptor.create(TOK_FROM, "TOK_FROM")
                                , root_2);

                                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2116:15: ^( TOK_VIRTUAL_TABLE ^( TOK_VIRTUAL_TABREF ^( TOK_ANONYMOUS ) ) valuesClause )
                                {
                                CommonTree root_3 = (CommonTree)adaptor.nil();
                                root_3 = (CommonTree)adaptor.becomeRoot(
                                (CommonTree)adaptor.create(TOK_VIRTUAL_TABLE, "TOK_VIRTUAL_TABLE")
                                , root_3);

                                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2116:35: ^( TOK_VIRTUAL_TABREF ^( TOK_ANONYMOUS ) )
                                {
                                CommonTree root_4 = (CommonTree)adaptor.nil();
                                root_4 = (CommonTree)adaptor.becomeRoot(
                                (CommonTree)adaptor.create(TOK_VIRTUAL_TABREF, "TOK_VIRTUAL_TABREF")
                                , root_4);

                                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2116:56: ^( TOK_ANONYMOUS )
                                {
                                CommonTree root_5 = (CommonTree)adaptor.nil();
                                root_5 = (CommonTree)adaptor.becomeRoot(
                                (CommonTree)adaptor.create(TOK_ANONYMOUS, "TOK_ANONYMOUS")
                                , root_5);

                                adaptor.addChild(root_4, root_5);
                                }

                                adaptor.addChild(root_3, root_4);
                                }

                                adaptor.addChild(root_3, stream_valuesClause.nextTree());

                                adaptor.addChild(root_2, root_3);
                                }

                                adaptor.addChild(root_1, root_2);
                                }

                                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2118:13: ^( TOK_INSERT ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) )
                                {
                                CommonTree root_2 = (CommonTree)adaptor.nil();
                                root_2 = (CommonTree)adaptor.becomeRoot(
                                (CommonTree)adaptor.create(TOK_INSERT, "TOK_INSERT")
                                , root_2);

                                adaptor.addChild(root_2, (i!=null?((CommonTree)i.tree):null));

                                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2118:36: ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) )
                                {
                                CommonTree root_3 = (CommonTree)adaptor.nil();
                                root_3 = (CommonTree)adaptor.becomeRoot(
                                (CommonTree)adaptor.create(TOK_SELECT, "TOK_SELECT")
                                , root_3);

                                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2118:49: ^( TOK_SELEXPR TOK_ALLCOLREF )
                                {
                                CommonTree root_4 = (CommonTree)adaptor.nil();
                                root_4 = (CommonTree)adaptor.becomeRoot(
                                (CommonTree)adaptor.create(TOK_SELEXPR, "TOK_SELEXPR")
                                , root_4);

                                adaptor.addChild(root_4, 
                                (CommonTree)adaptor.create(TOK_ALLCOLREF, "TOK_ALLCOLREF")
                                );

                                adaptor.addChild(root_3, root_4);
                                }

                                adaptor.addChild(root_2, root_3);
                                }

                                adaptor.addChild(root_1, root_2);
                                }

                                adaptor.addChild(root_0, root_1);
                                }

                            }


                            retval.tree = root_0;

                            }
                            break;

                    }


                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2122:4: selectStatement[topLevel]
                    {
                    root_0 = (CommonTree)adaptor.nil();


                    pushFollow(FOLLOW_selectStatement_in_regularBody13261);
                    selectStatement820=selectStatement(topLevel);

                    state._fsp--;

                    adaptor.addChild(root_0, selectStatement820.getTree());

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "regularBody"


    public static class selectStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "selectStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2125:2: selectStatement[boolean topLevel] : ( singleSelectStatement -> singleSelectStatement ) (u= setOperator b= singleSelectStatement -> ^( $u $b) )* -> {u != null && topLevel}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) ) ) ->;
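    /*
     * Illustrative note (not generated by ANTLR): selectStatement mirrors fromStatement, but over
     * singleSelectStatement operands: each setOperator iteration left-nests the tree built so far
     * as the first child of the operator node, i.e. -> ^( $u <previous> $b ). When a set operator
     * was seen and topLevel is true, the result is wrapped in a synthetic
     * TOK_QUERY / TOK_FROM / TOK_SUBQUERY tree with a generateUnionAlias() alias plus a
     * select-all insert branch (TOK_SELECT over TOK_SELEXPR TOK_ALLCOLREF writing to TOK_TMP_FILE);
     * otherwise the accumulated tree is returned as-is.
     */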
    public final HiveParser.selectStatement_return selectStatement(boolean topLevel) throws RecognitionException {
        HiveParser.selectStatement_return retval = new HiveParser.selectStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.setOperator_return u =null;

        HiveParser.singleSelectStatement_return b =null;

        HiveParser.singleSelectStatement_return singleSelectStatement821 =null;


        RewriteRuleSubtreeStream stream_singleSelectStatement=new RewriteRuleSubtreeStream(adaptor,"rule singleSelectStatement");
        RewriteRuleSubtreeStream stream_setOperator=new RewriteRuleSubtreeStream(adaptor,"rule setOperator");
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2126:2: ( ( singleSelectStatement -> singleSelectStatement ) (u= setOperator b= singleSelectStatement -> ^( $u $b) )* -> {u != null && topLevel}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) ) ) ->)
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2126:4: ( singleSelectStatement -> singleSelectStatement ) (u= setOperator b= singleSelectStatement -> ^( $u $b) )*
            {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2126:4: ( singleSelectStatement -> singleSelectStatement )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2126:5: singleSelectStatement
            {
            pushFollow(FOLLOW_singleSelectStatement_in_selectStatement13278);
            singleSelectStatement821=singleSelectStatement();

            state._fsp--;

            stream_singleSelectStatement.add(singleSelectStatement821.getTree());

            // AST REWRITE
            // elements: singleSelectStatement
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2126:27: -> singleSelectStatement
            {
                adaptor.addChild(root_0, stream_singleSelectStatement.nextTree());

            }


            retval.tree = root_0;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2127:4: (u= setOperator b= singleSelectStatement -> ^( $u $b) )*
            loop230:
            do {
                int alt230=2;
                switch ( input.LA(1) ) {
                case KW_UNION:
                    {
                    alt230=1;
                    }
                    break;

                }

                switch (alt230) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2127:5: u= setOperator b= singleSelectStatement
            	    {
            	    pushFollow(FOLLOW_setOperator_in_selectStatement13291);
            	    u=setOperator();

            	    state._fsp--;

            	    stream_setOperator.add(u.getTree());

            	    pushFollow(FOLLOW_singleSelectStatement_in_selectStatement13295);
            	    b=singleSelectStatement();

            	    state._fsp--;

            	    stream_singleSelectStatement.add(b.getTree());

            	    // AST REWRITE
            	    // elements: b, u
            	    // token labels: 
            	    // rule labels: retval, u, b
            	    // token list labels: 
            	    // rule list labels: 
            	    // wildcard labels: 
            	    retval.tree = root_0;
            	    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
            	    RewriteRuleSubtreeStream stream_u=new RewriteRuleSubtreeStream(adaptor,"rule u",u!=null?u.tree:null);
            	    RewriteRuleSubtreeStream stream_b=new RewriteRuleSubtreeStream(adaptor,"rule b",b!=null?b.tree:null);

            	    root_0 = (CommonTree)adaptor.nil();
            	    // 2128:8: -> ^( $u $b)
            	    {
            	        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2128:11: ^( $u $b)
            	        {
            	        CommonTree root_1 = (CommonTree)adaptor.nil();
            	        root_1 = (CommonTree)adaptor.becomeRoot(stream_u.nextNode(), root_1);

            	        adaptor.addChild(root_1, ((CommonTree)retval.tree));

            	        adaptor.addChild(root_1, stream_b.nextTree());

            	        adaptor.addChild(root_0, root_1);
            	        }

            	    }


            	    retval.tree = root_0;

            	    }
            	    break;

            	default :
            	    break loop230;
                }
            } while (true);


            // AST REWRITE
            // elements: 
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2130:4: -> {u != null && topLevel}? ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) ) )
            if (u != null && topLevel) {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2130:32: ^( TOK_QUERY ^( TOK_FROM ^( TOK_SUBQUERY ) ) ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) ) )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_QUERY, "TOK_QUERY")
                , root_1);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2131:10: ^( TOK_FROM ^( TOK_SUBQUERY ) )
                {
                CommonTree root_2 = (CommonTree)adaptor.nil();
                root_2 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_FROM, "TOK_FROM")
                , root_2);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2132:12: ^( TOK_SUBQUERY )
                {
                CommonTree root_3 = (CommonTree)adaptor.nil();
                root_3 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SUBQUERY, "TOK_SUBQUERY")
                , root_3);

                adaptor.addChild(root_3, ((CommonTree)retval.tree));

                adaptor.addChild(root_3, adaptor.create(Identifier, generateUnionAlias()));

                adaptor.addChild(root_2, root_3);
                }

                adaptor.addChild(root_1, root_2);
                }

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2137:10: ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) ) )
                {
                CommonTree root_2 = (CommonTree)adaptor.nil();
                root_2 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_INSERT, "TOK_INSERT")
                , root_2);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2138:13: ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) )
                {
                CommonTree root_3 = (CommonTree)adaptor.nil();
                root_3 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DESTINATION, "TOK_DESTINATION")
                , root_3);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2138:31: ^( TOK_DIR TOK_TMP_FILE )
                {
                CommonTree root_4 = (CommonTree)adaptor.nil();
                root_4 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DIR, "TOK_DIR")
                , root_4);

                adaptor.addChild(root_4, 
                (CommonTree)adaptor.create(TOK_TMP_FILE, "TOK_TMP_FILE")
                );

                adaptor.addChild(root_3, root_4);
                }

                adaptor.addChild(root_2, root_3);
                }

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2139:13: ^( TOK_SELECT ^( TOK_SELEXPR TOK_ALLCOLREF ) )
                {
                CommonTree root_3 = (CommonTree)adaptor.nil();
                root_3 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SELECT, "TOK_SELECT")
                , root_3);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2139:26: ^( TOK_SELEXPR TOK_ALLCOLREF )
                {
                CommonTree root_4 = (CommonTree)adaptor.nil();
                root_4 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SELEXPR, "TOK_SELEXPR")
                , root_4);

                adaptor.addChild(root_4, 
                (CommonTree)adaptor.create(TOK_ALLCOLREF, "TOK_ALLCOLREF")
                );

                adaptor.addChild(root_3, root_4);
                }

                adaptor.addChild(root_2, root_3);
                }

                adaptor.addChild(root_1, root_2);
                }

                adaptor.addChild(root_0, root_1);
                }

            }

            else // 2142:5: ->
            {
                adaptor.addChild(root_0, ((CommonTree)retval.tree));

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "selectStatement"


    public static class singleSelectStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "singleSelectStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2145:1: singleSelectStatement : selectClause ( fromClause )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? -> ^( TOK_QUERY ( fromClause )? ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) selectClause ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? ) ) ;
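    /*
     * Illustrative note (not generated by ANTLR): singleSelectStatement accepts the clause list
     * "selectClause fromClause? whereClause? groupByClause? havingClause? orderByClause?
     *  clusterByClause? distributeByClause? sortByClause? window_clause? limitClause?" and
     * rewrites it to
     *
     *   ^( TOK_QUERY ( fromClause )?
     *                ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) )
     *                              selectClause <optional clauses, same order> ) )
     *
     * For example, "SELECT key FROM src WHERE key > 0 LIMIT 10" is expected to yield a TOK_QUERY
     * whose TOK_INSERT child carries the select, where and limit subtrees; the SQL text is
     * illustrative only.
     */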
    public final HiveParser.singleSelectStatement_return singleSelectStatement() throws RecognitionException {
        HiveParser.singleSelectStatement_return retval = new HiveParser.singleSelectStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser_SelectClauseParser.selectClause_return selectClause822 =null;

        HiveParser_FromClauseParser.fromClause_return fromClause823 =null;

        HiveParser_FromClauseParser.whereClause_return whereClause824 =null;

        HiveParser_IdentifiersParser.groupByClause_return groupByClause825 =null;

        HiveParser_IdentifiersParser.havingClause_return havingClause826 =null;

        HiveParser_IdentifiersParser.orderByClause_return orderByClause827 =null;

        HiveParser_IdentifiersParser.clusterByClause_return clusterByClause828 =null;

        HiveParser_IdentifiersParser.distributeByClause_return distributeByClause829 =null;

        HiveParser_IdentifiersParser.sortByClause_return sortByClause830 =null;

        HiveParser_SelectClauseParser.window_clause_return window_clause831 =null;

        HiveParser.limitClause_return limitClause832 =null;


        RewriteRuleSubtreeStream stream_whereClause=new RewriteRuleSubtreeStream(adaptor,"rule whereClause");
        RewriteRuleSubtreeStream stream_window_clause=new RewriteRuleSubtreeStream(adaptor,"rule window_clause");
        RewriteRuleSubtreeStream stream_clusterByClause=new RewriteRuleSubtreeStream(adaptor,"rule clusterByClause");
        RewriteRuleSubtreeStream stream_distributeByClause=new RewriteRuleSubtreeStream(adaptor,"rule distributeByClause");
        RewriteRuleSubtreeStream stream_limitClause=new RewriteRuleSubtreeStream(adaptor,"rule limitClause");
        RewriteRuleSubtreeStream stream_orderByClause=new RewriteRuleSubtreeStream(adaptor,"rule orderByClause");
        RewriteRuleSubtreeStream stream_sortByClause=new RewriteRuleSubtreeStream(adaptor,"rule sortByClause");
        RewriteRuleSubtreeStream stream_groupByClause=new RewriteRuleSubtreeStream(adaptor,"rule groupByClause");
        RewriteRuleSubtreeStream stream_havingClause=new RewriteRuleSubtreeStream(adaptor,"rule havingClause");
        RewriteRuleSubtreeStream stream_selectClause=new RewriteRuleSubtreeStream(adaptor,"rule selectClause");
        RewriteRuleSubtreeStream stream_fromClause=new RewriteRuleSubtreeStream(adaptor,"rule fromClause");
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2146:4: ( selectClause ( fromClause )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? -> ^( TOK_QUERY ( fromClause )? ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) selectClause ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? ) ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2147:4: selectClause ( fromClause )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )?
            {
            pushFollow(FOLLOW_selectClause_in_singleSelectStatement13517);
            selectClause822=selectClause();

            state._fsp--;

            stream_selectClause.add(selectClause822.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2148:4: ( fromClause )?
            int alt231=2;
            switch ( input.LA(1) ) {
                case KW_FROM:
                    {
                    alt231=1;
                    }
                    break;
            }

            switch (alt231) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2148:4: fromClause
                    {
                    pushFollow(FOLLOW_fromClause_in_singleSelectStatement13522);
                    fromClause823=fromClause();

                    state._fsp--;

                    stream_fromClause.add(fromClause823.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2149:4: ( whereClause )?
            int alt232=2;
            switch ( input.LA(1) ) {
                case KW_WHERE:
                    {
                    alt232=1;
                    }
                    break;
            }

            switch (alt232) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2149:4: whereClause
                    {
                    pushFollow(FOLLOW_whereClause_in_singleSelectStatement13528);
                    whereClause824=whereClause();

                    state._fsp--;

                    stream_whereClause.add(whereClause824.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2150:4: ( groupByClause )?
            int alt233=2;
            switch ( input.LA(1) ) {
                case KW_GROUP:
                    {
                    alt233=1;
                    }
                    break;
            }

            switch (alt233) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2150:4: groupByClause
                    {
                    pushFollow(FOLLOW_groupByClause_in_singleSelectStatement13534);
                    groupByClause825=groupByClause();

                    state._fsp--;

                    stream_groupByClause.add(groupByClause825.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2151:4: ( havingClause )?
            int alt234=2;
            switch ( input.LA(1) ) {
                case KW_HAVING:
                    {
                    alt234=1;
                    }
                    break;
            }

            switch (alt234) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2151:4: havingClause
                    {
                    pushFollow(FOLLOW_havingClause_in_singleSelectStatement13540);
                    havingClause826=havingClause();

                    state._fsp--;

                    stream_havingClause.add(havingClause826.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2152:4: ( orderByClause )?
            int alt235=2;
            switch ( input.LA(1) ) {
                case KW_ORDER:
                    {
                    alt235=1;
                    }
                    break;
            }

            switch (alt235) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2152:4: orderByClause
                    {
                    pushFollow(FOLLOW_orderByClause_in_singleSelectStatement13546);
                    orderByClause827=orderByClause();

                    state._fsp--;

                    stream_orderByClause.add(orderByClause827.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2153:4: ( clusterByClause )?
            int alt236=2;
            switch ( input.LA(1) ) {
                case KW_CLUSTER:
                    {
                    alt236=1;
                    }
                    break;
            }

            switch (alt236) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2153:4: clusterByClause
                    {
                    pushFollow(FOLLOW_clusterByClause_in_singleSelectStatement13552);
                    clusterByClause828=clusterByClause();

                    state._fsp--;

                    stream_clusterByClause.add(clusterByClause828.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2154:4: ( distributeByClause )?
            int alt237=2;
            switch ( input.LA(1) ) {
                case KW_DISTRIBUTE:
                    {
                    alt237=1;
                    }
                    break;
            }

            switch (alt237) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2154:4: distributeByClause
                    {
                    pushFollow(FOLLOW_distributeByClause_in_singleSelectStatement13558);
                    distributeByClause829=distributeByClause();

                    state._fsp--;

                    stream_distributeByClause.add(distributeByClause829.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2155:4: ( sortByClause )?
            int alt238=2;
            switch ( input.LA(1) ) {
                case KW_SORT:
                    {
                    alt238=1;
                    }
                    break;
            }

            switch (alt238) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2155:4: sortByClause
                    {
                    pushFollow(FOLLOW_sortByClause_in_singleSelectStatement13564);
                    sortByClause830=sortByClause();

                    state._fsp--;

                    stream_sortByClause.add(sortByClause830.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2156:4: ( window_clause )?
            int alt239=2;
            switch ( input.LA(1) ) {
                case KW_WINDOW:
                    {
                    alt239=1;
                    }
                    break;
            }

            switch (alt239) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2156:4: window_clause
                    {
                    pushFollow(FOLLOW_window_clause_in_singleSelectStatement13570);
                    window_clause831=window_clause();

                    state._fsp--;

                    stream_window_clause.add(window_clause831.getTree());

                    }
                    break;

            }


            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2157:4: ( limitClause )?
            int alt240=2;
            switch ( input.LA(1) ) {
                case KW_LIMIT:
                    {
                    alt240=1;
                    }
                    break;
            }

            switch (alt240) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2157:4: limitClause
                    {
                    pushFollow(FOLLOW_limitClause_in_singleSelectStatement13576);
                    limitClause832=limitClause();

                    state._fsp--;

                    stream_limitClause.add(limitClause832.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: sortByClause, clusterByClause, orderByClause, groupByClause, whereClause, selectClause, window_clause, distributeByClause, havingClause, limitClause, fromClause
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2157:17: -> ^( TOK_QUERY ( fromClause )? ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) selectClause ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? ) )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2157:20: ^( TOK_QUERY ( fromClause )? ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) selectClause ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? ) )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_QUERY, "TOK_QUERY")
                , root_1);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2157:32: ( fromClause )?
                if ( stream_fromClause.hasNext() ) {
                    adaptor.addChild(root_1, stream_fromClause.nextTree());

                }
                stream_fromClause.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2157:44: ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) selectClause ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? )
                {
                CommonTree root_2 = (CommonTree)adaptor.nil();
                root_2 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_INSERT, "TOK_INSERT")
                , root_2);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2157:57: ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) )
                {
                CommonTree root_3 = (CommonTree)adaptor.nil();
                root_3 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DESTINATION, "TOK_DESTINATION")
                , root_3);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2157:75: ^( TOK_DIR TOK_TMP_FILE )
                {
                CommonTree root_4 = (CommonTree)adaptor.nil();
                root_4 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DIR, "TOK_DIR")
                , root_4);

                adaptor.addChild(root_4, 
                (CommonTree)adaptor.create(TOK_TMP_FILE, "TOK_TMP_FILE")
                );

                adaptor.addChild(root_3, root_4);
                }

                adaptor.addChild(root_2, root_3);
                }

                adaptor.addChild(root_2, stream_selectClause.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2158:35: ( whereClause )?
                if ( stream_whereClause.hasNext() ) {
                    adaptor.addChild(root_2, stream_whereClause.nextTree());

                }
                stream_whereClause.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2158:48: ( groupByClause )?
                if ( stream_groupByClause.hasNext() ) {
                    adaptor.addChild(root_2, stream_groupByClause.nextTree());

                }
                stream_groupByClause.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2158:63: ( havingClause )?
                if ( stream_havingClause.hasNext() ) {
                    adaptor.addChild(root_2, stream_havingClause.nextTree());

                }
                stream_havingClause.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2158:77: ( orderByClause )?
                if ( stream_orderByClause.hasNext() ) {
                    adaptor.addChild(root_2, stream_orderByClause.nextTree());

                }
                stream_orderByClause.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2158:92: ( clusterByClause )?
                if ( stream_clusterByClause.hasNext() ) {
                    adaptor.addChild(root_2, stream_clusterByClause.nextTree());

                }
                stream_clusterByClause.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2159:22: ( distributeByClause )?
                if ( stream_distributeByClause.hasNext() ) {
                    adaptor.addChild(root_2, stream_distributeByClause.nextTree());

                }
                stream_distributeByClause.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2159:42: ( sortByClause )?
                if ( stream_sortByClause.hasNext() ) {
                    adaptor.addChild(root_2, stream_sortByClause.nextTree());

                }
                stream_sortByClause.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2159:56: ( window_clause )?
                if ( stream_window_clause.hasNext() ) {
                    adaptor.addChild(root_2, stream_window_clause.nextTree());

                }
                stream_window_clause.reset();

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2159:71: ( limitClause )?
                if ( stream_limitClause.hasNext() ) {
                    adaptor.addChild(root_2, stream_limitClause.nextTree());

                }
                stream_limitClause.reset();

                adaptor.addChild(root_1, root_2);
                }

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "singleSelectStatement"


    public static class selectStatementWithCTE_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "selectStatementWithCTE"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2162:1: selectStatementWithCTE : (w= withClause )? selectStatement[true] -> selectStatement ;
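    // Illustrative note: this rule accepts an optional WITH clause in front of a select
    // statement, e.g. "WITH q AS (SELECT key FROM src) SELECT * FROM q" (example query);
    // when a withClause is matched, the embedded action below attaches its subtree as a
    // child of the selectStatement tree before the rewrite emits selectStatement alone.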
    public final HiveParser.selectStatementWithCTE_return selectStatementWithCTE() throws RecognitionException {
        HiveParser.selectStatementWithCTE_return retval = new HiveParser.selectStatementWithCTE_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.withClause_return w =null;

        HiveParser.selectStatement_return selectStatement833 =null;


        RewriteRuleSubtreeStream stream_selectStatement=new RewriteRuleSubtreeStream(adaptor,"rule selectStatement");
        RewriteRuleSubtreeStream stream_withClause=new RewriteRuleSubtreeStream(adaptor,"rule withClause");
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2163:5: ( (w= withClause )? selectStatement[true] -> selectStatement )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2164:5: (w= withClause )? selectStatement[true]
            {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2164:5: (w= withClause )?
            int alt241=2;
            switch ( input.LA(1) ) {
                case KW_WITH:
                    {
                    alt241=1;
                    }
                    break;
            }

            switch (alt241) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2164:6: w= withClause
                    {
                    pushFollow(FOLLOW_withClause_in_selectStatementWithCTE13694);
                    w=withClause();

                    state._fsp--;

                    stream_withClause.add(w.getTree());

                    }
                    break;

            }


            pushFollow(FOLLOW_selectStatement_in_selectStatementWithCTE13702);
            selectStatement833=selectStatement(true);

            state._fsp--;

            stream_selectStatement.add(selectStatement833.getTree());


                  if ((w!=null?((CommonTree)w.tree):null) != null) {
                  adaptor.addChild((selectStatement833!=null?((CommonTree)selectStatement833.tree):null), (w!=null?((CommonTree)w.tree):null));
                  }
                

            // AST REWRITE
            // elements: selectStatement
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2170:5: -> selectStatement
            {
                adaptor.addChild(root_0, stream_selectStatement.nextTree());

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "selectStatementWithCTE"


    public static class body_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "body"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2173:1: body : ( insertClause selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? -> ^( TOK_INSERT insertClause selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? ) | selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? -> ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? ) );
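    // Illustrative note: a query body either starts with an insertClause (alternative 1,
    // e.g. "INSERT OVERWRITE TABLE dst SELECT ..." as an example) or with a bare
    // selectClause (alternative 2); both alternatives collect the same optional clause
    // list and rewrite it under a single TOK_INSERT node, differing only in whether the
    // destination comes from insertClause or defaults to ^(TOK_DIR TOK_TMP_FILE).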
    public final HiveParser.body_return body() throws RecognitionException {
        HiveParser.body_return retval = new HiveParser.body_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        HiveParser.insertClause_return insertClause834 =null;

        HiveParser_SelectClauseParser.selectClause_return selectClause835 =null;

        HiveParser_FromClauseParser.lateralView_return lateralView836 =null;

        HiveParser_FromClauseParser.whereClause_return whereClause837 =null;

        HiveParser_IdentifiersParser.groupByClause_return groupByClause838 =null;

        HiveParser_IdentifiersParser.havingClause_return havingClause839 =null;

        HiveParser_IdentifiersParser.orderByClause_return orderByClause840 =null;

        HiveParser_IdentifiersParser.clusterByClause_return clusterByClause841 =null;

        HiveParser_IdentifiersParser.distributeByClause_return distributeByClause842 =null;

        HiveParser_IdentifiersParser.sortByClause_return sortByClause843 =null;

        HiveParser_SelectClauseParser.window_clause_return window_clause844 =null;

        HiveParser.limitClause_return limitClause845 =null;

        HiveParser_SelectClauseParser.selectClause_return selectClause846 =null;

        HiveParser_FromClauseParser.lateralView_return lateralView847 =null;

        HiveParser_FromClauseParser.whereClause_return whereClause848 =null;

        HiveParser_IdentifiersParser.groupByClause_return groupByClause849 =null;

        HiveParser_IdentifiersParser.havingClause_return havingClause850 =null;

        HiveParser_IdentifiersParser.orderByClause_return orderByClause851 =null;

        HiveParser_IdentifiersParser.clusterByClause_return clusterByClause852 =null;

        HiveParser_IdentifiersParser.distributeByClause_return distributeByClause853 =null;

        HiveParser_IdentifiersParser.sortByClause_return sortByClause854 =null;

        HiveParser_SelectClauseParser.window_clause_return window_clause855 =null;

        HiveParser.limitClause_return limitClause856 =null;


        RewriteRuleSubtreeStream stream_whereClause=new RewriteRuleSubtreeStream(adaptor,"rule whereClause");
        RewriteRuleSubtreeStream stream_window_clause=new RewriteRuleSubtreeStream(adaptor,"rule window_clause");
        RewriteRuleSubtreeStream stream_clusterByClause=new RewriteRuleSubtreeStream(adaptor,"rule clusterByClause");
        RewriteRuleSubtreeStream stream_distributeByClause=new RewriteRuleSubtreeStream(adaptor,"rule distributeByClause");
        RewriteRuleSubtreeStream stream_lateralView=new RewriteRuleSubtreeStream(adaptor,"rule lateralView");
        RewriteRuleSubtreeStream stream_limitClause=new RewriteRuleSubtreeStream(adaptor,"rule limitClause");
        RewriteRuleSubtreeStream stream_orderByClause=new RewriteRuleSubtreeStream(adaptor,"rule orderByClause");
        RewriteRuleSubtreeStream stream_sortByClause=new RewriteRuleSubtreeStream(adaptor,"rule sortByClause");
        RewriteRuleSubtreeStream stream_insertClause=new RewriteRuleSubtreeStream(adaptor,"rule insertClause");
        RewriteRuleSubtreeStream stream_groupByClause=new RewriteRuleSubtreeStream(adaptor,"rule groupByClause");
        RewriteRuleSubtreeStream stream_havingClause=new RewriteRuleSubtreeStream(adaptor,"rule havingClause");
        RewriteRuleSubtreeStream stream_selectClause=new RewriteRuleSubtreeStream(adaptor,"rule selectClause");
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2174:4: ( insertClause selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? -> ^( TOK_INSERT insertClause selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? ) | selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? -> ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? ) )
            int alt262=2;
            switch ( input.LA(1) ) {
            case KW_INSERT:
                {
                alt262=1;
                }
                break;
            case KW_MAP:
            case KW_REDUCE:
            case KW_SELECT:
                {
                alt262=2;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 262, 0, input);

                throw nvae;

            }

            switch (alt262) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2175:4: insertClause selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )?
                    {
                    pushFollow(FOLLOW_insertClause_in_body13733);
                    insertClause834=insertClause();

                    state._fsp--;

                    stream_insertClause.add(insertClause834.getTree());

                    pushFollow(FOLLOW_selectClause_in_body13738);
                    selectClause835=selectClause();

                    state._fsp--;

                    stream_selectClause.add(selectClause835.getTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2177:4: ( lateralView )?
                    int alt242=2;
                    switch ( input.LA(1) ) {
                        case KW_LATERAL:
                            {
                            alt242=1;
                            }
                            break;
                    }

                    switch (alt242) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2177:4: lateralView
                            {
                            pushFollow(FOLLOW_lateralView_in_body13743);
                            lateralView836=lateralView();

                            state._fsp--;

                            stream_lateralView.add(lateralView836.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2178:4: ( whereClause )?
                    int alt243=2;
                    switch ( input.LA(1) ) {
                        case KW_WHERE:
                            {
                            alt243=1;
                            }
                            break;
                    }

                    switch (alt243) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2178:4: whereClause
                            {
                            pushFollow(FOLLOW_whereClause_in_body13749);
                            whereClause837=whereClause();

                            state._fsp--;

                            stream_whereClause.add(whereClause837.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2179:4: ( groupByClause )?
                    int alt244=2;
                    switch ( input.LA(1) ) {
                        case KW_GROUP:
                            {
                            alt244=1;
                            }
                            break;
                    }

                    switch (alt244) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2179:4: groupByClause
                            {
                            pushFollow(FOLLOW_groupByClause_in_body13755);
                            groupByClause838=groupByClause();

                            state._fsp--;

                            stream_groupByClause.add(groupByClause838.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2180:4: ( havingClause )?
                    int alt245=2;
                    switch ( input.LA(1) ) {
                        case KW_HAVING:
                            {
                            alt245=1;
                            }
                            break;
                    }

                    switch (alt245) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2180:4: havingClause
                            {
                            pushFollow(FOLLOW_havingClause_in_body13761);
                            havingClause839=havingClause();

                            state._fsp--;

                            stream_havingClause.add(havingClause839.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2181:4: ( orderByClause )?
                    int alt246=2;
                    switch ( input.LA(1) ) {
                        case KW_ORDER:
                            {
                            alt246=1;
                            }
                            break;
                    }

                    switch (alt246) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2181:4: orderByClause
                            {
                            pushFollow(FOLLOW_orderByClause_in_body13767);
                            orderByClause840=orderByClause();

                            state._fsp--;

                            stream_orderByClause.add(orderByClause840.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2182:4: ( clusterByClause )?
                    int alt247=2;
                    switch ( input.LA(1) ) {
                        case KW_CLUSTER:
                            {
                            alt247=1;
                            }
                            break;
                    }

                    switch (alt247) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2182:4: clusterByClause
                            {
                            pushFollow(FOLLOW_clusterByClause_in_body13773);
                            clusterByClause841=clusterByClause();

                            state._fsp--;

                            stream_clusterByClause.add(clusterByClause841.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2183:4: ( distributeByClause )?
                    int alt248=2;
                    switch ( input.LA(1) ) {
                        case KW_DISTRIBUTE:
                            {
                            alt248=1;
                            }
                            break;
                    }

                    switch (alt248) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2183:4: distributeByClause
                            {
                            pushFollow(FOLLOW_distributeByClause_in_body13779);
                            distributeByClause842=distributeByClause();

                            state._fsp--;

                            stream_distributeByClause.add(distributeByClause842.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2184:4: ( sortByClause )?
                    int alt249=2;
                    switch ( input.LA(1) ) {
                        case KW_SORT:
                            {
                            alt249=1;
                            }
                            break;
                    }

                    switch (alt249) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2184:4: sortByClause
                            {
                            pushFollow(FOLLOW_sortByClause_in_body13785);
                            sortByClause843=sortByClause();

                            state._fsp--;

                            stream_sortByClause.add(sortByClause843.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2185:4: ( window_clause )?
                    int alt250=2;
                    switch ( input.LA(1) ) {
                        case KW_WINDOW:
                            {
                            alt250=1;
                            }
                            break;
                    }

                    switch (alt250) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2185:4: window_clause
                            {
                            pushFollow(FOLLOW_window_clause_in_body13791);
                            window_clause844=window_clause();

                            state._fsp--;

                            stream_window_clause.add(window_clause844.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2186:4: ( limitClause )?
                    int alt251=2;
                    switch ( input.LA(1) ) {
                        case KW_LIMIT:
                            {
                            alt251=1;
                            }
                            break;
                    }

                    switch (alt251) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2186:4: limitClause
                            {
                            pushFollow(FOLLOW_limitClause_in_body13797);
                            limitClause845=limitClause();

                            state._fsp--;

                            stream_limitClause.add(limitClause845.getTree());

                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: window_clause, clusterByClause, orderByClause, havingClause, limitClause, groupByClause, lateralView, sortByClause, whereClause, insertClause, selectClause, distributeByClause
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 2186:17: -> ^( TOK_INSERT insertClause selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? )
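                    // Illustrative note: here the explicit insertClause subtree becomes the
                    // first child of TOK_INSERT (its destination), followed by selectClause
                    // and whichever optional clauses were matched above.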
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2186:20: ^( TOK_INSERT insertClause selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_INSERT, "TOK_INSERT")
                        , root_1);

                        adaptor.addChild(root_1, stream_insertClause.nextTree());

                        adaptor.addChild(root_1, stream_selectClause.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2187:35: ( lateralView )?
                        if ( stream_lateralView.hasNext() ) {
                            adaptor.addChild(root_1, stream_lateralView.nextTree());

                        }
                        stream_lateralView.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2187:48: ( whereClause )?
                        if ( stream_whereClause.hasNext() ) {
                            adaptor.addChild(root_1, stream_whereClause.nextTree());

                        }
                        stream_whereClause.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2187:61: ( groupByClause )?
                        if ( stream_groupByClause.hasNext() ) {
                            adaptor.addChild(root_1, stream_groupByClause.nextTree());

                        }
                        stream_groupByClause.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2187:76: ( havingClause )?
                        if ( stream_havingClause.hasNext() ) {
                            adaptor.addChild(root_1, stream_havingClause.nextTree());

                        }
                        stream_havingClause.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2187:90: ( orderByClause )?
                        if ( stream_orderByClause.hasNext() ) {
                            adaptor.addChild(root_1, stream_orderByClause.nextTree());

                        }
                        stream_orderByClause.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2187:105: ( clusterByClause )?
                        if ( stream_clusterByClause.hasNext() ) {
                            adaptor.addChild(root_1, stream_clusterByClause.nextTree());

                        }
                        stream_clusterByClause.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2188:22: ( distributeByClause )?
                        if ( stream_distributeByClause.hasNext() ) {
                            adaptor.addChild(root_1, stream_distributeByClause.nextTree());

                        }
                        stream_distributeByClause.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2188:42: ( sortByClause )?
                        if ( stream_sortByClause.hasNext() ) {
                            adaptor.addChild(root_1, stream_sortByClause.nextTree());

                        }
                        stream_sortByClause.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2188:56: ( window_clause )?
                        if ( stream_window_clause.hasNext() ) {
                            adaptor.addChild(root_1, stream_window_clause.nextTree());

                        }
                        stream_window_clause.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2188:71: ( limitClause )?
                        if ( stream_limitClause.hasNext() ) {
                            adaptor.addChild(root_1, stream_limitClause.nextTree());

                        }
                        stream_limitClause.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2190:4: selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )?
                    {
                    pushFollow(FOLLOW_selectClause_in_body13890);
                    selectClause846=selectClause();

                    state._fsp--;

                    stream_selectClause.add(selectClause846.getTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2191:4: ( lateralView )?
                    int alt252=2;
                    switch ( input.LA(1) ) {
                        case KW_LATERAL:
                            {
                            alt252=1;
                            }
                            break;
                    }

                    switch (alt252) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2191:4: lateralView
                            {
                            pushFollow(FOLLOW_lateralView_in_body13895);
                            lateralView847=lateralView();

                            state._fsp--;

                            stream_lateralView.add(lateralView847.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2192:4: ( whereClause )?
                    int alt253=2;
                    switch ( input.LA(1) ) {
                        case KW_WHERE:
                            {
                            alt253=1;
                            }
                            break;
                    }

                    switch (alt253) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2192:4: whereClause
                            {
                            pushFollow(FOLLOW_whereClause_in_body13901);
                            whereClause848=whereClause();

                            state._fsp--;

                            stream_whereClause.add(whereClause848.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2193:4: ( groupByClause )?
                    int alt254=2;
                    switch ( input.LA(1) ) {
                        case KW_GROUP:
                            {
                            alt254=1;
                            }
                            break;
                    }

                    switch (alt254) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2193:4: groupByClause
                            {
                            pushFollow(FOLLOW_groupByClause_in_body13907);
                            groupByClause849=groupByClause();

                            state._fsp--;

                            stream_groupByClause.add(groupByClause849.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2194:4: ( havingClause )?
                    int alt255=2;
                    switch ( input.LA(1) ) {
                        case KW_HAVING:
                            {
                            alt255=1;
                            }
                            break;
                    }

                    switch (alt255) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2194:4: havingClause
                            {
                            pushFollow(FOLLOW_havingClause_in_body13913);
                            havingClause850=havingClause();

                            state._fsp--;

                            stream_havingClause.add(havingClause850.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2195:4: ( orderByClause )?
                    int alt256=2;
                    switch ( input.LA(1) ) {
                        case KW_ORDER:
                            {
                            alt256=1;
                            }
                            break;
                    }

                    switch (alt256) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2195:4: orderByClause
                            {
                            pushFollow(FOLLOW_orderByClause_in_body13919);
                            orderByClause851=orderByClause();

                            state._fsp--;

                            stream_orderByClause.add(orderByClause851.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2196:4: ( clusterByClause )?
                    int alt257=2;
                    switch ( input.LA(1) ) {
                        case KW_CLUSTER:
                            {
                            alt257=1;
                            }
                            break;
                    }

                    switch (alt257) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2196:4: clusterByClause
                            {
                            pushFollow(FOLLOW_clusterByClause_in_body13925);
                            clusterByClause852=clusterByClause();

                            state._fsp--;

                            stream_clusterByClause.add(clusterByClause852.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2197:4: ( distributeByClause )?
                    int alt258=2;
                    switch ( input.LA(1) ) {
                        case KW_DISTRIBUTE:
                            {
                            alt258=1;
                            }
                            break;
                    }

                    switch (alt258) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2197:4: distributeByClause
                            {
                            pushFollow(FOLLOW_distributeByClause_in_body13931);
                            distributeByClause853=distributeByClause();

                            state._fsp--;

                            stream_distributeByClause.add(distributeByClause853.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2198:4: ( sortByClause )?
                    int alt259=2;
                    switch ( input.LA(1) ) {
                        case KW_SORT:
                            {
                            alt259=1;
                            }
                            break;
                    }

                    switch (alt259) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2198:4: sortByClause
                            {
                            pushFollow(FOLLOW_sortByClause_in_body13937);
                            sortByClause854=sortByClause();

                            state._fsp--;

                            stream_sortByClause.add(sortByClause854.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2199:4: ( window_clause )?
                    int alt260=2;
                    switch ( input.LA(1) ) {
                        case KW_WINDOW:
                            {
                            alt260=1;
                            }
                            break;
                    }

                    switch (alt260) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2199:4: window_clause
                            {
                            pushFollow(FOLLOW_window_clause_in_body13943);
                            window_clause855=window_clause();

                            state._fsp--;

                            stream_window_clause.add(window_clause855.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2200:4: ( limitClause )?
                    int alt261=2;
                    switch ( input.LA(1) ) {
                        case KW_LIMIT:
                            {
                            alt261=1;
                            }
                            break;
                    }

                    switch (alt261) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2200:4: limitClause
                            {
                            pushFollow(FOLLOW_limitClause_in_body13949);
                            limitClause856=limitClause();

                            state._fsp--;

                            stream_limitClause.add(limitClause856.getTree());

                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: groupByClause, clusterByClause, whereClause, orderByClause, sortByClause, limitClause, havingClause, lateralView, window_clause, distributeByClause, selectClause
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 2200:17: -> ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? )
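                    // Illustrative note: with no insertClause present, the same TOK_INSERT
                    // shape is built as in singleSelectStatement, substituting the default
                    // temporary destination ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE)).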
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2200:20: ^( TOK_INSERT ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) ) selectClause ( lateralView )? ( whereClause )? ( groupByClause )? ( havingClause )? ( orderByClause )? ( clusterByClause )? ( distributeByClause )? ( sortByClause )? ( window_clause )? ( limitClause )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_INSERT, "TOK_INSERT")
                        , root_1);

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2200:33: ^( TOK_DESTINATION ^( TOK_DIR TOK_TMP_FILE ) )
                        {
                        CommonTree root_2 = (CommonTree)adaptor.nil();
                        root_2 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_DESTINATION, "TOK_DESTINATION")
                        , root_2);

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2200:51: ^( TOK_DIR TOK_TMP_FILE )
                        {
                        CommonTree root_3 = (CommonTree)adaptor.nil();
                        root_3 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_DIR, "TOK_DIR")
                        , root_3);

                        adaptor.addChild(root_3, 
                        (CommonTree)adaptor.create(TOK_TMP_FILE, "TOK_TMP_FILE")
                        );

                        adaptor.addChild(root_2, root_3);
                        }

                        adaptor.addChild(root_1, root_2);
                        }

                        adaptor.addChild(root_1, stream_selectClause.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2201:35: ( lateralView )?
                        if ( stream_lateralView.hasNext() ) {
                            adaptor.addChild(root_1, stream_lateralView.nextTree());

                        }
                        stream_lateralView.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2201:48: ( whereClause )?
                        if ( stream_whereClause.hasNext() ) {
                            adaptor.addChild(root_1, stream_whereClause.nextTree());

                        }
                        stream_whereClause.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2201:61: ( groupByClause )?
                        if ( stream_groupByClause.hasNext() ) {
                            adaptor.addChild(root_1, stream_groupByClause.nextTree());

                        }
                        stream_groupByClause.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2201:76: ( havingClause )?
                        if ( stream_havingClause.hasNext() ) {
                            adaptor.addChild(root_1, stream_havingClause.nextTree());

                        }
                        stream_havingClause.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2201:90: ( orderByClause )?
                        if ( stream_orderByClause.hasNext() ) {
                            adaptor.addChild(root_1, stream_orderByClause.nextTree());

                        }
                        stream_orderByClause.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2201:105: ( clusterByClause )?
                        if ( stream_clusterByClause.hasNext() ) {
                            adaptor.addChild(root_1, stream_clusterByClause.nextTree());

                        }
                        stream_clusterByClause.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2202:22: ( distributeByClause )?
                        if ( stream_distributeByClause.hasNext() ) {
                            adaptor.addChild(root_1, stream_distributeByClause.nextTree());

                        }
                        stream_distributeByClause.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2202:42: ( sortByClause )?
                        if ( stream_sortByClause.hasNext() ) {
                            adaptor.addChild(root_1, stream_sortByClause.nextTree());

                        }
                        stream_sortByClause.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2202:56: ( window_clause )?
                        if ( stream_window_clause.hasNext() ) {
                            adaptor.addChild(root_1, stream_window_clause.nextTree());

                        }
                        stream_window_clause.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2202:71: ( limitClause )?
                        if ( stream_limitClause.hasNext() ) {
                            adaptor.addChild(root_1, stream_limitClause.nextTree());

                        }
                        stream_limitClause.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "body"


    public static class insertClause_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "insertClause"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2205:1: insertClause : ( KW_INSERT KW_OVERWRITE destination ( ifNotExists )? -> ^( TOK_DESTINATION destination ( ifNotExists )? ) | KW_INSERT KW_INTO KW_TABLE tableOrPartition -> ^( TOK_INSERT_INTO tableOrPartition ) );
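    // Illustrative note: the two insert forms map to distinct AST roots, e.g.
    //   INSERT OVERWRITE TABLE dst ...   -> ^(TOK_DESTINATION destination (ifNotExists)?)
    //   INSERT INTO TABLE dst ...        -> ^(TOK_INSERT_INTO tableOrPartition)
    // (example statements); the optional ifNotExists applies only to the OVERWRITE form.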
    public final HiveParser.insertClause_return insertClause() throws RecognitionException {
        HiveParser.insertClause_return retval = new HiveParser.insertClause_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_INSERT857=null;
        Token KW_OVERWRITE858=null;
        Token KW_INSERT861=null;
        Token KW_INTO862=null;
        Token KW_TABLE863=null;
        HiveParser.destination_return destination859 =null;

        HiveParser.ifNotExists_return ifNotExists860 =null;

        HiveParser_IdentifiersParser.tableOrPartition_return tableOrPartition864 =null;


        CommonTree KW_INSERT857_tree=null;
        CommonTree KW_OVERWRITE858_tree=null;
        CommonTree KW_INSERT861_tree=null;
        CommonTree KW_INTO862_tree=null;
        CommonTree KW_TABLE863_tree=null;
        RewriteRuleTokenStream stream_KW_INTO=new RewriteRuleTokenStream(adaptor,"token KW_INTO");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleTokenStream stream_KW_OVERWRITE=new RewriteRuleTokenStream(adaptor,"token KW_OVERWRITE");
        RewriteRuleTokenStream stream_KW_INSERT=new RewriteRuleTokenStream(adaptor,"token KW_INSERT");
        RewriteRuleSubtreeStream stream_tableOrPartition=new RewriteRuleSubtreeStream(adaptor,"rule tableOrPartition");
        RewriteRuleSubtreeStream stream_ifNotExists=new RewriteRuleSubtreeStream(adaptor,"rule ifNotExists");
        RewriteRuleSubtreeStream stream_destination=new RewriteRuleSubtreeStream(adaptor,"rule destination");
         pushMsg("insert clause", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2208:4: ( KW_INSERT KW_OVERWRITE destination ( ifNotExists )? -> ^( TOK_DESTINATION destination ( ifNotExists )? ) | KW_INSERT KW_INTO KW_TABLE tableOrPartition -> ^( TOK_INSERT_INTO tableOrPartition ) )
            int alt264=2;
            switch ( input.LA(1) ) {
            case KW_INSERT:
                {
                switch ( input.LA(2) ) {
                case KW_OVERWRITE:
                    {
                    alt264=1;
                    }
                    break;
                case KW_INTO:
                    {
                    alt264=2;
                    }
                    break;
                default:
                    NoViableAltException nvae =
                        new NoViableAltException("", 264, 1, input);

                    throw nvae;

                }

                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 264, 0, input);

                throw nvae;

            }
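            // Illustrative note: the choice between the two forms is made on the token
            // after KW_INSERT -- KW_OVERWRITE selects alternative 1 and KW_INTO selects
            // alternative 2; any other lookahead raises NoViableAltException.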

            switch (alt264) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2209:6: KW_INSERT KW_OVERWRITE destination ( ifNotExists )?
                    {
                    KW_INSERT857=(Token)match(input,KW_INSERT,FOLLOW_KW_INSERT_in_insertClause14070);  
                    stream_KW_INSERT.add(KW_INSERT857);


                    KW_OVERWRITE858=(Token)match(input,KW_OVERWRITE,FOLLOW_KW_OVERWRITE_in_insertClause14072);  
                    stream_KW_OVERWRITE.add(KW_OVERWRITE858);


                    pushFollow(FOLLOW_destination_in_insertClause14074);
                    destination859=destination();

                    state._fsp--;

                    stream_destination.add(destination859.getTree());

                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2209:41: ( ifNotExists )?
                    int alt263=2;
                    switch ( input.LA(1) ) {
                        case KW_IF:
                            {
                            alt263=1;
                            }
                            break;
                    }

                    switch (alt263) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2209:41: ifNotExists
                            {
                            pushFollow(FOLLOW_ifNotExists_in_insertClause14076);
                            ifNotExists860=ifNotExists();

                            state._fsp--;

                            stream_ifNotExists.add(ifNotExists860.getTree());

                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: ifNotExists, destination
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 2209:54: -> ^( TOK_DESTINATION destination ( ifNotExists )? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2209:57: ^( TOK_DESTINATION destination ( ifNotExists )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_DESTINATION, "TOK_DESTINATION")
                        , root_1);

                        adaptor.addChild(root_1, stream_destination.nextTree());

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2209:87: ( ifNotExists )?
                        if ( stream_ifNotExists.hasNext() ) {
                            adaptor.addChild(root_1, stream_ifNotExists.nextTree());

                        }
                        stream_ifNotExists.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2210:6: KW_INSERT KW_INTO KW_TABLE tableOrPartition
                    {
                    KW_INSERT861=(Token)match(input,KW_INSERT,FOLLOW_KW_INSERT_in_insertClause14095);  
                    stream_KW_INSERT.add(KW_INSERT861);


                    KW_INTO862=(Token)match(input,KW_INTO,FOLLOW_KW_INTO_in_insertClause14097);  
                    stream_KW_INTO.add(KW_INTO862);


                    KW_TABLE863=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_insertClause14099);  
                    stream_KW_TABLE.add(KW_TABLE863);


                    pushFollow(FOLLOW_tableOrPartition_in_insertClause14101);
                    tableOrPartition864=tableOrPartition();

                    state._fsp--;

                    stream_tableOrPartition.add(tableOrPartition864.getTree());

                    // AST REWRITE
                    // elements: tableOrPartition
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 2211:8: -> ^( TOK_INSERT_INTO tableOrPartition )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2211:11: ^( TOK_INSERT_INTO tableOrPartition )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_INSERT_INTO, "TOK_INSERT_INTO")
                        , root_1);

                        adaptor.addChild(root_1, stream_tableOrPartition.nextTree());

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
            reportError(e);
            throw e;
        }

        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "insertClause"


    public static class destination_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "destination"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2214:1: destination : ( KW_LOCAL KW_DIRECTORY StringLiteral ( tableRowFormat )? ( tableFileFormat )? -> ^( TOK_LOCAL_DIR StringLiteral ( tableRowFormat )? ( tableFileFormat )? ) | KW_DIRECTORY StringLiteral -> ^( TOK_DIR StringLiteral ) | KW_TABLE tableOrPartition -> tableOrPartition );
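    // Illustrative note: a destination takes one of three forms, for example
    //   LOCAL DIRECTORY '/tmp/out'   -> ^(TOK_LOCAL_DIR StringLiteral ...)
    //   DIRECTORY '/user/out'        -> ^(TOK_DIR StringLiteral)
    //   TABLE tableOrPartition       -> tableOrPartition
    // (example paths); only the LOCAL DIRECTORY form accepts the optional row-format and
    // file-format specifications.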
    public final HiveParser.destination_return destination() throws RecognitionException {
        HiveParser.destination_return retval = new HiveParser.destination_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_LOCAL865=null;
        Token KW_DIRECTORY866=null;
        Token StringLiteral867=null;
        Token KW_DIRECTORY870=null;
        Token StringLiteral871=null;
        Token KW_TABLE872=null;
        HiveParser.tableRowFormat_return tableRowFormat868 =null;

        HiveParser.tableFileFormat_return tableFileFormat869 =null;

        HiveParser_IdentifiersParser.tableOrPartition_return tableOrPartition873 =null;


        CommonTree KW_LOCAL865_tree=null;
        CommonTree KW_DIRECTORY866_tree=null;
        CommonTree StringLiteral867_tree=null;
        CommonTree KW_DIRECTORY870_tree=null;
        CommonTree StringLiteral871_tree=null;
        CommonTree KW_TABLE872_tree=null;
        RewriteRuleTokenStream stream_StringLiteral=new RewriteRuleTokenStream(adaptor,"token StringLiteral");
        RewriteRuleTokenStream stream_KW_DIRECTORY=new RewriteRuleTokenStream(adaptor,"token KW_DIRECTORY");
        RewriteRuleTokenStream stream_KW_LOCAL=new RewriteRuleTokenStream(adaptor,"token KW_LOCAL");
        RewriteRuleTokenStream stream_KW_TABLE=new RewriteRuleTokenStream(adaptor,"token KW_TABLE");
        RewriteRuleSubtreeStream stream_tableOrPartition=new RewriteRuleSubtreeStream(adaptor,"rule tableOrPartition");
        RewriteRuleSubtreeStream stream_tableRowFormat=new RewriteRuleSubtreeStream(adaptor,"rule tableRowFormat");
        RewriteRuleSubtreeStream stream_tableFileFormat=new RewriteRuleSubtreeStream(adaptor,"rule tableFileFormat");
         pushMsg("destination specification", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2217:4: ( KW_LOCAL KW_DIRECTORY StringLiteral ( tableRowFormat )? ( tableFileFormat )? -> ^( TOK_LOCAL_DIR StringLiteral ( tableRowFormat )? ( tableFileFormat )? ) | KW_DIRECTORY StringLiteral -> ^( TOK_DIR StringLiteral ) | KW_TABLE tableOrPartition -> tableOrPartition )
            int alt267=3;
            switch ( input.LA(1) ) {
            case KW_LOCAL:
                {
                alt267=1;
                }
                break;
            case KW_DIRECTORY:
                {
                alt267=2;
                }
                break;
            case KW_TABLE:
                {
                alt267=3;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 267, 0, input);

                throw nvae;

            }

            switch (alt267) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2218:6: KW_LOCAL KW_DIRECTORY StringLiteral ( tableRowFormat )? ( tableFileFormat )?
                    {
                    KW_LOCAL865=(Token)match(input,KW_LOCAL,FOLLOW_KW_LOCAL_in_destination14146);  
                    stream_KW_LOCAL.add(KW_LOCAL865);


                    KW_DIRECTORY866=(Token)match(input,KW_DIRECTORY,FOLLOW_KW_DIRECTORY_in_destination14148);  
                    stream_KW_DIRECTORY.add(KW_DIRECTORY866);


                    StringLiteral867=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_destination14150);  
                    stream_StringLiteral.add(StringLiteral867);


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2218:42: ( tableRowFormat )?
                    int alt265=2;
                    switch ( input.LA(1) ) {
                        case KW_ROW:
                            {
                            alt265=1;
                            }
                            break;
                    }

                    switch (alt265) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2218:42: tableRowFormat
                            {
                            pushFollow(FOLLOW_tableRowFormat_in_destination14152);
                            tableRowFormat868=tableRowFormat();

                            state._fsp--;

                            stream_tableRowFormat.add(tableRowFormat868.getTree());

                            }
                            break;

                    }


                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2218:58: ( tableFileFormat )?
                    int alt266=2;
                    switch ( input.LA(1) ) {
                        case KW_STORED:
                            {
                            alt266=1;
                            }
                            break;
                    }

                    switch (alt266) {
                        case 1 :
                            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2218:58: tableFileFormat
                            {
                            pushFollow(FOLLOW_tableFileFormat_in_destination14155);
                            tableFileFormat869=tableFileFormat();

                            state._fsp--;

                            stream_tableFileFormat.add(tableFileFormat869.getTree());

                            }
                            break;

                    }


                    // AST REWRITE
                    // elements: tableFileFormat, tableRowFormat, StringLiteral
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 2218:75: -> ^( TOK_LOCAL_DIR StringLiteral ( tableRowFormat )? ( tableFileFormat )? )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2218:78: ^( TOK_LOCAL_DIR StringLiteral ( tableRowFormat )? ( tableFileFormat )? )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_LOCAL_DIR, "TOK_LOCAL_DIR")
                        , root_1);

                        adaptor.addChild(root_1, 
                        stream_StringLiteral.nextNode()
                        );

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2218:108: ( tableRowFormat )?
                        if ( stream_tableRowFormat.hasNext() ) {
                            adaptor.addChild(root_1, stream_tableRowFormat.nextTree());

                        }
                        stream_tableRowFormat.reset();

                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2218:124: ( tableFileFormat )?
                        if ( stream_tableFileFormat.hasNext() ) {
                            adaptor.addChild(root_1, stream_tableFileFormat.nextTree());

                        }
                        stream_tableFileFormat.reset();

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 2 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2219:6: KW_DIRECTORY StringLiteral
                    {
                    KW_DIRECTORY870=(Token)match(input,KW_DIRECTORY,FOLLOW_KW_DIRECTORY_in_destination14177);  
                    stream_KW_DIRECTORY.add(KW_DIRECTORY870);


                    StringLiteral871=(Token)match(input,StringLiteral,FOLLOW_StringLiteral_in_destination14179);  
                    stream_StringLiteral.add(StringLiteral871);


                    // AST REWRITE
                    // elements: StringLiteral
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 2219:33: -> ^( TOK_DIR StringLiteral )
                    {
                        // org/apache/hadoop/hive/ql/parse/HiveParser.g:2219:36: ^( TOK_DIR StringLiteral )
                        {
                        CommonTree root_1 = (CommonTree)adaptor.nil();
                        root_1 = (CommonTree)adaptor.becomeRoot(
                        (CommonTree)adaptor.create(TOK_DIR, "TOK_DIR")
                        , root_1);

                        adaptor.addChild(root_1, 
                        stream_StringLiteral.nextNode()
                        );

                        adaptor.addChild(root_0, root_1);
                        }

                    }


                    retval.tree = root_0;

                    }
                    break;
                case 3 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2220:6: KW_TABLE tableOrPartition
                    {
                    KW_TABLE872=(Token)match(input,KW_TABLE,FOLLOW_KW_TABLE_in_destination14194);  
                    stream_KW_TABLE.add(KW_TABLE872);


                    pushFollow(FOLLOW_tableOrPartition_in_destination14196);
                    tableOrPartition873=tableOrPartition();

                    state._fsp--;

                    stream_tableOrPartition.add(tableOrPartition873.getTree());

                    // AST REWRITE
                    // elements: tableOrPartition
                    // token labels: 
                    // rule labels: retval
                    // token list labels: 
                    // rule list labels: 
                    // wildcard labels: 
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                    root_0 = (CommonTree)adaptor.nil();
                    // 2220:32: -> tableOrPartition
                    {
                        adaptor.addChild(root_0, stream_tableOrPartition.nextTree());

                    }


                    retval.tree = root_0;

                    }
                    break;

            }
            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "destination"


    public static class limitClause_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "limitClause"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2223:1: limitClause : KW_LIMIT num= Number -> ^( TOK_LIMIT $num) ;
    public final HiveParser.limitClause_return limitClause() throws RecognitionException {
        HiveParser.limitClause_return retval = new HiveParser.limitClause_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token num=null;
        Token KW_LIMIT874=null;

        CommonTree num_tree=null;
        CommonTree KW_LIMIT874_tree=null;
        RewriteRuleTokenStream stream_Number=new RewriteRuleTokenStream(adaptor,"token Number");
        RewriteRuleTokenStream stream_KW_LIMIT=new RewriteRuleTokenStream(adaptor,"token KW_LIMIT");

         pushMsg("limit clause", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2226:4: ( KW_LIMIT num= Number -> ^( TOK_LIMIT $num) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2227:4: KW_LIMIT num= Number
            {
            KW_LIMIT874=(Token)match(input,KW_LIMIT,FOLLOW_KW_LIMIT_in_limitClause14228);  
            stream_KW_LIMIT.add(KW_LIMIT874);


            num=(Token)match(input,Number,FOLLOW_Number_in_limitClause14232);  
            stream_Number.add(num);


            // AST REWRITE
            // elements: num
            // token labels: num
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleTokenStream stream_num=new RewriteRuleTokenStream(adaptor,"token num",num);
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2227:24: -> ^( TOK_LIMIT $num)
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2227:27: ^( TOK_LIMIT $num)
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_LIMIT, "TOK_LIMIT")
                , root_1);

                adaptor.addChild(root_1, stream_num.nextNode());

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "limitClause"


    public static class deleteStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "deleteStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2231:1: deleteStatement : KW_DELETE KW_FROM tableName ( whereClause )? -> ^( TOK_DELETE_FROM tableName ( whereClause )? ) ;
    public final HiveParser.deleteStatement_return deleteStatement() throws RecognitionException {
        HiveParser.deleteStatement_return retval = new HiveParser.deleteStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_DELETE875=null;
        Token KW_FROM876=null;
        HiveParser_FromClauseParser.tableName_return tableName877 =null;

        HiveParser_FromClauseParser.whereClause_return whereClause878 =null;


        CommonTree KW_DELETE875_tree=null;
        CommonTree KW_FROM876_tree=null;
        RewriteRuleTokenStream stream_KW_DELETE=new RewriteRuleTokenStream(adaptor,"token KW_DELETE");
        RewriteRuleTokenStream stream_KW_FROM=new RewriteRuleTokenStream(adaptor,"token KW_FROM");
        RewriteRuleSubtreeStream stream_whereClause=new RewriteRuleSubtreeStream(adaptor,"rule whereClause");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
         pushMsg("delete statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2234:4: ( KW_DELETE KW_FROM tableName ( whereClause )? -> ^( TOK_DELETE_FROM tableName ( whereClause )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2235:4: KW_DELETE KW_FROM tableName ( whereClause )?
            {
            KW_DELETE875=(Token)match(input,KW_DELETE,FOLLOW_KW_DELETE_in_deleteStatement14270);  
            stream_KW_DELETE.add(KW_DELETE875);


            KW_FROM876=(Token)match(input,KW_FROM,FOLLOW_KW_FROM_in_deleteStatement14272);  
            stream_KW_FROM.add(KW_FROM876);


            pushFollow(FOLLOW_tableName_in_deleteStatement14274);
            tableName877=tableName();

            state._fsp--;

            stream_tableName.add(tableName877.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2235:32: ( whereClause )?
            int alt268=2;
            switch ( input.LA(1) ) {
                case KW_WHERE:
                    {
                    alt268=1;
                    }
                    break;
            }

            switch (alt268) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2235:33: whereClause
                    {
                    pushFollow(FOLLOW_whereClause_in_deleteStatement14277);
                    whereClause878=whereClause();

                    state._fsp--;

                    stream_whereClause.add(whereClause878.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: whereClause, tableName
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2235:47: -> ^( TOK_DELETE_FROM tableName ( whereClause )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2235:50: ^( TOK_DELETE_FROM tableName ( whereClause )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_DELETE_FROM, "TOK_DELETE_FROM")
                , root_1);

                adaptor.addChild(root_1, stream_tableName.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2235:78: ( whereClause )?
                if ( stream_whereClause.hasNext() ) {
                    adaptor.addChild(root_1, stream_whereClause.nextTree());

                }
                stream_whereClause.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "deleteStatement"


    public static class columnAssignmentClause_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "columnAssignmentClause"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2239:1: columnAssignmentClause : tableOrColumn EQUAL ^ precedencePlusExpression ;
    public final HiveParser.columnAssignmentClause_return columnAssignmentClause() throws RecognitionException {
        HiveParser.columnAssignmentClause_return retval = new HiveParser.columnAssignmentClause_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token EQUAL880=null;
        HiveParser_FromClauseParser.tableOrColumn_return tableOrColumn879 =null;

        HiveParser_IdentifiersParser.precedencePlusExpression_return precedencePlusExpression881 =null;


        CommonTree EQUAL880_tree=null;

        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2240:4: ( tableOrColumn EQUAL ^ precedencePlusExpression )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2241:4: tableOrColumn EQUAL ^ precedencePlusExpression
            {
            root_0 = (CommonTree)adaptor.nil();


            pushFollow(FOLLOW_tableOrColumn_in_columnAssignmentClause14310);
            tableOrColumn879=tableOrColumn();

            state._fsp--;

            adaptor.addChild(root_0, tableOrColumn879.getTree());

            EQUAL880=(Token)match(input,EQUAL,FOLLOW_EQUAL_in_columnAssignmentClause14312); 
            EQUAL880_tree = 
            (CommonTree)adaptor.create(EQUAL880)
            ;
            root_0 = (CommonTree)adaptor.becomeRoot(EQUAL880_tree, root_0);


            pushFollow(FOLLOW_precedencePlusExpression_in_columnAssignmentClause14315);
            precedencePlusExpression881=precedencePlusExpression();

            state._fsp--;

            adaptor.addChild(root_0, precedencePlusExpression881.getTree());

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "columnAssignmentClause"


    public static class setColumnsClause_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "setColumnsClause"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2245:1: setColumnsClause : KW_SET columnAssignmentClause ( COMMA columnAssignmentClause )* -> ^( TOK_SET_COLUMNS_CLAUSE ( columnAssignmentClause )* ) ;
    public final HiveParser.setColumnsClause_return setColumnsClause() throws RecognitionException {
        HiveParser.setColumnsClause_return retval = new HiveParser.setColumnsClause_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_SET882=null;
        Token COMMA884=null;
        HiveParser.columnAssignmentClause_return columnAssignmentClause883 =null;

        HiveParser.columnAssignmentClause_return columnAssignmentClause885 =null;


        CommonTree KW_SET882_tree=null;
        CommonTree COMMA884_tree=null;
        RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
        RewriteRuleTokenStream stream_KW_SET=new RewriteRuleTokenStream(adaptor,"token KW_SET");
        RewriteRuleSubtreeStream stream_columnAssignmentClause=new RewriteRuleSubtreeStream(adaptor,"rule columnAssignmentClause");
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2246:4: ( KW_SET columnAssignmentClause ( COMMA columnAssignmentClause )* -> ^( TOK_SET_COLUMNS_CLAUSE ( columnAssignmentClause )* ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2247:4: KW_SET columnAssignmentClause ( COMMA columnAssignmentClause )*
            {
            KW_SET882=(Token)match(input,KW_SET,FOLLOW_KW_SET_in_setColumnsClause14335);  
            stream_KW_SET.add(KW_SET882);


            pushFollow(FOLLOW_columnAssignmentClause_in_setColumnsClause14337);
            columnAssignmentClause883=columnAssignmentClause();

            state._fsp--;

            stream_columnAssignmentClause.add(columnAssignmentClause883.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2247:34: ( COMMA columnAssignmentClause )*
            loop269:
            do {
                int alt269=2;
                switch ( input.LA(1) ) {
                case COMMA:
                    {
                    alt269=1;
                    }
                    break;

                }

                switch (alt269) {
            	case 1 :
            	    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2247:35: COMMA columnAssignmentClause
            	    {
            	    COMMA884=(Token)match(input,COMMA,FOLLOW_COMMA_in_setColumnsClause14340);  
            	    stream_COMMA.add(COMMA884);


            	    pushFollow(FOLLOW_columnAssignmentClause_in_setColumnsClause14342);
            	    columnAssignmentClause885=columnAssignmentClause();

            	    state._fsp--;

            	    stream_columnAssignmentClause.add(columnAssignmentClause885.getTree());

            	    }
            	    break;

            	default :
            	    break loop269;
                }
            } while (true);


            // AST REWRITE
            // elements: columnAssignmentClause
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2247:66: -> ^( TOK_SET_COLUMNS_CLAUSE ( columnAssignmentClause )* )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2247:69: ^( TOK_SET_COLUMNS_CLAUSE ( columnAssignmentClause )* )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_SET_COLUMNS_CLAUSE, "TOK_SET_COLUMNS_CLAUSE")
                , root_1);

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2247:94: ( columnAssignmentClause )*
                while ( stream_columnAssignmentClause.hasNext() ) {
                    adaptor.addChild(root_1, stream_columnAssignmentClause.nextTree());

                }
                stream_columnAssignmentClause.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "setColumnsClause"


    public static class updateStatement_return extends ParserRuleReturnScope {
        CommonTree tree;
        public Object getTree() { return tree; }
    };


    // $ANTLR start "updateStatement"
    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2254:1: updateStatement : KW_UPDATE tableName setColumnsClause ( whereClause )? -> ^( TOK_UPDATE_TABLE tableName setColumnsClause ( whereClause )? ) ;
    public final HiveParser.updateStatement_return updateStatement() throws RecognitionException {
        HiveParser.updateStatement_return retval = new HiveParser.updateStatement_return();
        retval.start = input.LT(1);


        CommonTree root_0 = null;

        Token KW_UPDATE886=null;
        HiveParser_FromClauseParser.tableName_return tableName887 =null;

        HiveParser.setColumnsClause_return setColumnsClause888 =null;

        HiveParser_FromClauseParser.whereClause_return whereClause889 =null;


        CommonTree KW_UPDATE886_tree=null;
        RewriteRuleTokenStream stream_KW_UPDATE=new RewriteRuleTokenStream(adaptor,"token KW_UPDATE");
        RewriteRuleSubtreeStream stream_whereClause=new RewriteRuleSubtreeStream(adaptor,"rule whereClause");
        RewriteRuleSubtreeStream stream_tableName=new RewriteRuleSubtreeStream(adaptor,"rule tableName");
        RewriteRuleSubtreeStream stream_setColumnsClause=new RewriteRuleSubtreeStream(adaptor,"rule setColumnsClause");
         pushMsg("update statement", state); 
        try {
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2257:4: ( KW_UPDATE tableName setColumnsClause ( whereClause )? -> ^( TOK_UPDATE_TABLE tableName setColumnsClause ( whereClause )? ) )
            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2258:4: KW_UPDATE tableName setColumnsClause ( whereClause )?
            {
            KW_UPDATE886=(Token)match(input,KW_UPDATE,FOLLOW_KW_UPDATE_in_updateStatement14384);  
            stream_KW_UPDATE.add(KW_UPDATE886);


            pushFollow(FOLLOW_tableName_in_updateStatement14386);
            tableName887=tableName();

            state._fsp--;

            stream_tableName.add(tableName887.getTree());

            pushFollow(FOLLOW_setColumnsClause_in_updateStatement14388);
            setColumnsClause888=setColumnsClause();

            state._fsp--;

            stream_setColumnsClause.add(setColumnsClause888.getTree());

            // org/apache/hadoop/hive/ql/parse/HiveParser.g:2258:41: ( whereClause )?
            int alt270=2;
            switch ( input.LA(1) ) {
                case KW_WHERE:
                    {
                    alt270=1;
                    }
                    break;
            }

            switch (alt270) {
                case 1 :
                    // org/apache/hadoop/hive/ql/parse/HiveParser.g:2258:41: whereClause
                    {
                    pushFollow(FOLLOW_whereClause_in_updateStatement14390);
                    whereClause889=whereClause();

                    state._fsp--;

                    stream_whereClause.add(whereClause889.getTree());

                    }
                    break;

            }


            // AST REWRITE
            // elements: whereClause, setColumnsClause, tableName
            // token labels: 
            // rule labels: retval
            // token list labels: 
            // rule list labels: 
            // wildcard labels: 
            retval.tree = root_0;
            RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

            root_0 = (CommonTree)adaptor.nil();
            // 2258:54: -> ^( TOK_UPDATE_TABLE tableName setColumnsClause ( whereClause )? )
            {
                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2258:57: ^( TOK_UPDATE_TABLE tableName setColumnsClause ( whereClause )? )
                {
                CommonTree root_1 = (CommonTree)adaptor.nil();
                root_1 = (CommonTree)adaptor.becomeRoot(
                (CommonTree)adaptor.create(TOK_UPDATE_TABLE, "TOK_UPDATE_TABLE")
                , root_1);

                adaptor.addChild(root_1, stream_tableName.nextTree());

                adaptor.addChild(root_1, stream_setColumnsClause.nextTree());

                // org/apache/hadoop/hive/ql/parse/HiveParser.g:2258:103: ( whereClause )?
                if ( stream_whereClause.hasNext() ) {
                    adaptor.addChild(root_1, stream_whereClause.nextTree());

                }
                stream_whereClause.reset();

                adaptor.addChild(root_0, root_1);
                }

            }


            retval.tree = root_0;

            }

            retval.stop = input.LT(-1);


            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

             popMsg(state); 
        }

        catch (RecognitionException e) {
         reportError(e);
          throw e;
        }

        finally {
        	// do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "updateStatement"

    // Delegated rules
    public HiveParser_IdentifiersParser.dropPartitionOperator_return dropPartitionOperator() throws RecognitionException { return gIdentifiersParser.dropPartitionOperator(); }
    public HiveParser_IdentifiersParser.identifier_return identifier() throws RecognitionException { return gIdentifiersParser.identifier(); }
    public HiveParser_IdentifiersParser.precedenceBitwiseXorOperator_return precedenceBitwiseXorOperator() throws RecognitionException { return gIdentifiersParser.precedenceBitwiseXorOperator(); }
    public HiveParser_IdentifiersParser.precedenceNotExpression_return precedenceNotExpression() throws RecognitionException { return gIdentifiersParser.precedenceNotExpression(); }
    public HiveParser_SelectClauseParser.window_frame_boundary_return window_frame_boundary() throws RecognitionException { return gSelectClauseParser.window_frame_boundary(); }
    public HiveParser_IdentifiersParser.precedenceUnaryPrefixExpression_return precedenceUnaryPrefixExpression() throws RecognitionException { return gIdentifiersParser.precedenceUnaryPrefixExpression(); }
    public HiveParser_IdentifiersParser.tableOrPartition_return tableOrPartition() throws RecognitionException { return gIdentifiersParser.tableOrPartition(); }
    public HiveParser_IdentifiersParser.dateLiteral_return dateLiteral() throws RecognitionException { return gIdentifiersParser.dateLiteral(); }
    public HiveParser_IdentifiersParser.whenExpression_return whenExpression() throws RecognitionException { return gIdentifiersParser.whenExpression(); }
    public HiveParser_IdentifiersParser.dropPartitionSpec_return dropPartitionSpec() throws RecognitionException { return gIdentifiersParser.dropPartitionSpec(); }
    public HiveParser_SelectClauseParser.window_defn_return window_defn() throws RecognitionException { return gSelectClauseParser.window_defn(); }
    public HiveParser_IdentifiersParser.groupByExpression_return groupByExpression() throws RecognitionException { return gIdentifiersParser.groupByExpression(); }
    public HiveParser_IdentifiersParser.orderByClause_return orderByClause() throws RecognitionException { return gIdentifiersParser.orderByClause(); }
    public HiveParser_IdentifiersParser.groupByClause_return groupByClause() throws RecognitionException { return gIdentifiersParser.groupByClause(); }
    public HiveParser_IdentifiersParser.sortByClause_return sortByClause() throws RecognitionException { return gIdentifiersParser.sortByClause(); }
    public HiveParser_FromClauseParser.subQuerySource_return subQuerySource() throws RecognitionException { return gFromClauseParser.subQuerySource(); }
    public HiveParser_FromClauseParser.tableAlias_return tableAlias() throws RecognitionException { return gFromClauseParser.tableAlias(); }
    public HiveParser_SelectClauseParser.window_range_expression_return window_range_expression() throws RecognitionException { return gSelectClauseParser.window_range_expression(); }
    public HiveParser_IdentifiersParser.precedencePlusOperator_return precedencePlusOperator() throws RecognitionException { return gIdentifiersParser.precedencePlusOperator(); }
    public HiveParser_IdentifiersParser.function_return function() throws RecognitionException { return gIdentifiersParser.function(); }
    public HiveParser_FromClauseParser.fromSource_return fromSource() throws RecognitionException { return gFromClauseParser.fromSource(); }
    public HiveParser_IdentifiersParser.precedenceEqualOperator_return precedenceEqualOperator() throws RecognitionException { return gIdentifiersParser.precedenceEqualOperator(); }
    public HiveParser_IdentifiersParser.clusterByClause_return clusterByClause() throws RecognitionException { return gIdentifiersParser.clusterByClause(); }
    public HiveParser_SelectClauseParser.selectItem_return selectItem() throws RecognitionException { return gSelectClauseParser.selectItem(); }
    public HiveParser_IdentifiersParser.dropPartitionVal_return dropPartitionVal() throws RecognitionException { return gIdentifiersParser.dropPartitionVal(); }
    public HiveParser_SelectClauseParser.hintClause_return hintClause() throws RecognitionException { return gSelectClauseParser.hintClause(); }
    public HiveParser_IdentifiersParser.functionIdentifier_return functionIdentifier() throws RecognitionException { return gIdentifiersParser.functionIdentifier(); }
    public HiveParser_IdentifiersParser.precedenceAmpersandExpression_return precedenceAmpersandExpression() throws RecognitionException { return gIdentifiersParser.precedenceAmpersandExpression(); }
    public HiveParser_SelectClauseParser.window_clause_return window_clause() throws RecognitionException { return gSelectClauseParser.window_clause(); }
    public HiveParser_SelectClauseParser.hintArgName_return hintArgName() throws RecognitionException { return gSelectClauseParser.hintArgName(); }
    public HiveParser_FromClauseParser.joinSource_return joinSource() throws RecognitionException { return gFromClauseParser.joinSource(); }
    public HiveParser_SelectClauseParser.window_specification_return window_specification() throws RecognitionException { return gSelectClauseParser.window_specification(); }
    public HiveParser_SelectClauseParser.selectTrfmClause_return selectTrfmClause() throws RecognitionException { return gSelectClauseParser.selectTrfmClause(); }
    public HiveParser_SelectClauseParser.selectClause_return selectClause() throws RecognitionException { return gSelectClauseParser.selectClause(); }
    public HiveParser_IdentifiersParser.precedenceBitwiseOrExpression_return precedenceBitwiseOrExpression() throws RecognitionException { return gIdentifiersParser.precedenceBitwiseOrExpression(); }
    public HiveParser_IdentifiersParser.precedenceNotOperator_return precedenceNotOperator() throws RecognitionException { return gIdentifiersParser.precedenceNotOperator(); }
    public HiveParser_IdentifiersParser.precedenceEqualExpression_return precedenceEqualExpression() throws RecognitionException { return gIdentifiersParser.precedenceEqualExpression(); }
    public HiveParser_IdentifiersParser.precedenceStarOperator_return precedenceStarOperator() throws RecognitionException { return gIdentifiersParser.precedenceStarOperator(); }
    public HiveParser_FromClauseParser.viewName_return viewName() throws RecognitionException { return gFromClauseParser.viewName(); }
    public HiveParser_IdentifiersParser.partitionSpec_return partitionSpec() throws RecognitionException { return gIdentifiersParser.partitionSpec(); }
    public HiveParser_FromClauseParser.lateralView_return lateralView() throws RecognitionException { return gFromClauseParser.lateralView(); }
    public HiveParser_IdentifiersParser.nonReserved_return nonReserved() throws RecognitionException { return gIdentifiersParser.nonReserved(); }
    public HiveParser_FromClauseParser.whereClause_return whereClause() throws RecognitionException { return gFromClauseParser.whereClause(); }
    public HiveParser_FromClauseParser.tableOrColumn_return tableOrColumn() throws RecognitionException { return gFromClauseParser.tableOrColumn(); }
    public HiveParser_FromClauseParser.uniqueJoinExpr_return uniqueJoinExpr() throws RecognitionException { return gFromClauseParser.uniqueJoinExpr(); }
    public HiveParser_IdentifiersParser.precedenceOrOperator_return precedenceOrOperator() throws RecognitionException { return gIdentifiersParser.precedenceOrOperator(); }
    public HiveParser_FromClauseParser.partitioningSpec_return partitioningSpec() throws RecognitionException { return gFromClauseParser.partitioningSpec(); }
    public HiveParser_FromClauseParser.uniqueJoinSource_return uniqueJoinSource() throws RecognitionException { return gFromClauseParser.uniqueJoinSource(); }
    public HiveParser_FromClauseParser.fromClause_return fromClause() throws RecognitionException { return gFromClauseParser.fromClause(); }
    public HiveParser_IdentifiersParser.partitionVal_return partitionVal() throws RecognitionException { return gIdentifiersParser.partitionVal(); }
    public HiveParser_IdentifiersParser.precedenceBitwiseOrOperator_return precedenceBitwiseOrOperator() throws RecognitionException { return gIdentifiersParser.precedenceBitwiseOrOperator(); }
    public HiveParser_IdentifiersParser.caseExpression_return caseExpression() throws RecognitionException { return gIdentifiersParser.caseExpression(); }
    public HiveParser_IdentifiersParser.precedencePlusExpression_return precedencePlusExpression() throws RecognitionException { return gIdentifiersParser.precedencePlusExpression(); }
    public HiveParser_IdentifiersParser.booleanValue_return booleanValue() throws RecognitionException { return gIdentifiersParser.booleanValue(); }
    public HiveParser_IdentifiersParser.precedenceUnaryOperator_return precedenceUnaryOperator() throws RecognitionException { return gIdentifiersParser.precedenceUnaryOperator(); }
    public HiveParser_FromClauseParser.tableSample_return tableSample() throws RecognitionException { return gFromClauseParser.tableSample(); }
    public HiveParser_FromClauseParser.aliasList_return aliasList() throws RecognitionException { return gFromClauseParser.aliasList(); }
    public HiveParser_FromClauseParser.splitSample_return splitSample() throws RecognitionException { return gFromClauseParser.splitSample(); }
    public HiveParser_IdentifiersParser.constant_return constant() throws RecognitionException { return gIdentifiersParser.constant(); }
    public HiveParser_FromClauseParser.expressionList_return expressionList() throws RecognitionException { return gFromClauseParser.expressionList(); }
    public HiveParser_IdentifiersParser.precedenceAmpersandOperator_return precedenceAmpersandOperator() throws RecognitionException { return gIdentifiersParser.precedenceAmpersandOperator(); }
    public HiveParser_IdentifiersParser.subQueryExpression_return subQueryExpression() throws RecognitionException { return gIdentifiersParser.subQueryExpression(); }
    public HiveParser_IdentifiersParser.groupingSetExpression_return groupingSetExpression() throws RecognitionException { return gIdentifiersParser.groupingSetExpression(); }
    public HiveParser_IdentifiersParser.expression_return expression() throws RecognitionException { return gIdentifiersParser.expression(); }
    public HiveParser_IdentifiersParser.precedenceStarExpression_return precedenceStarExpression() throws RecognitionException { return gIdentifiersParser.precedenceStarExpression(); }
    public HiveParser_FromClauseParser.tableAllColumns_return tableAllColumns() throws RecognitionException { return gFromClauseParser.tableAllColumns(); }
    public HiveParser_IdentifiersParser.precedenceAndOperator_return precedenceAndOperator() throws RecognitionException { return gIdentifiersParser.precedenceAndOperator(); }
    public HiveParser_IdentifiersParser.partitionByClause_return partitionByClause() throws RecognitionException { return gIdentifiersParser.partitionByClause(); }
    public HiveParser_SelectClauseParser.selectList_return selectList() throws RecognitionException { return gSelectClauseParser.selectList(); }
    public HiveParser_FromClauseParser.valueRowConstructor_return valueRowConstructor() throws RecognitionException { return gFromClauseParser.valueRowConstructor(); }
    public HiveParser_FromClauseParser.valuesTableConstructor_return valuesTableConstructor() throws RecognitionException { return gFromClauseParser.valuesTableConstructor(); }
    public HiveParser_SelectClauseParser.hintItem_return hintItem() throws RecognitionException { return gSelectClauseParser.hintItem(); }
    public HiveParser_IdentifiersParser.distributeByClause_return distributeByClause() throws RecognitionException { return gIdentifiersParser.distributeByClause(); }
    public HiveParser_IdentifiersParser.charSetStringLiteral_return charSetStringLiteral() throws RecognitionException { return gIdentifiersParser.charSetStringLiteral(); }
    public HiveParser_IdentifiersParser.expressions_return expressions() throws RecognitionException { return gIdentifiersParser.expressions(); }
    public HiveParser_SelectClauseParser.trfmClause_return trfmClause() throws RecognitionException { return gSelectClauseParser.trfmClause(); }
    public HiveParser_SelectClauseParser.selectExpression_return selectExpression() throws RecognitionException { return gSelectClauseParser.selectExpression(); }
    public HiveParser_IdentifiersParser.precedenceFieldExpression_return precedenceFieldExpression() throws RecognitionException { return gIdentifiersParser.precedenceFieldExpression(); }
    public HiveParser_SelectClauseParser.hintList_return hintList() throws RecognitionException { return gSelectClauseParser.hintList(); }
    public HiveParser_SelectClauseParser.window_frame_start_boundary_return window_frame_start_boundary() throws RecognitionException { return gSelectClauseParser.window_frame_start_boundary(); }
    public HiveParser_IdentifiersParser.precedenceEqualNegatableOperator_return precedenceEqualNegatableOperator() throws RecognitionException { return gIdentifiersParser.precedenceEqualNegatableOperator(); }
    public HiveParser_FromClauseParser.joinToken_return joinToken() throws RecognitionException { return gFromClauseParser.joinToken(); }
    public HiveParser_IdentifiersParser.castExpression_return castExpression() throws RecognitionException { return gIdentifiersParser.castExpression(); }
    public HiveParser_IdentifiersParser.principalIdentifier_return principalIdentifier() throws RecognitionException { return gIdentifiersParser.principalIdentifier(); }
    public HiveParser_IdentifiersParser.descFuncNames_return descFuncNames() throws RecognitionException { return gIdentifiersParser.descFuncNames(); }
    public HiveParser_IdentifiersParser.stringLiteralSequence_return stringLiteralSequence() throws RecognitionException { return gIdentifiersParser.stringLiteralSequence(); }
    public HiveParser_SelectClauseParser.window_frame_return window_frame() throws RecognitionException { return gSelectClauseParser.window_frame(); }
    public HiveParser_FromClauseParser.tableNameColList_return tableNameColList() throws RecognitionException { return gFromClauseParser.tableNameColList(); }
    public HiveParser_SelectClauseParser.selectExpressionList_return selectExpressionList() throws RecognitionException { return gSelectClauseParser.selectExpressionList(); }
    public HiveParser_IdentifiersParser.havingCondition_return havingCondition() throws RecognitionException { return gIdentifiersParser.havingCondition(); }
    public HiveParser_IdentifiersParser.havingClause_return havingClause() throws RecognitionException { return gIdentifiersParser.havingClause(); }
    public HiveParser_FromClauseParser.partitionedTableFunction_return partitionedTableFunction() throws RecognitionException { return gFromClauseParser.partitionedTableFunction(); }
    public HiveParser_FromClauseParser.tableSource_return tableSource() throws RecognitionException { return gFromClauseParser.tableSource(); }
    public HiveParser_IdentifiersParser.atomExpression_return atomExpression() throws RecognitionException { return gIdentifiersParser.atomExpression(); }
    public HiveParser_FromClauseParser.searchCondition_return searchCondition() throws RecognitionException { return gFromClauseParser.searchCondition(); }
    public HiveParser_FromClauseParser.virtualTableSource_return virtualTableSource() throws RecognitionException { return gFromClauseParser.virtualTableSource(); }
    public HiveParser_IdentifiersParser.functionName_return functionName() throws RecognitionException { return gIdentifiersParser.functionName(); }
    public HiveParser_FromClauseParser.tableBucketSample_return tableBucketSample() throws RecognitionException { return gFromClauseParser.tableBucketSample(); }
    public HiveParser_IdentifiersParser.sysFuncNames_return sysFuncNames() throws RecognitionException { return gIdentifiersParser.sysFuncNames(); }
    public HiveParser_IdentifiersParser.nullCondition_return nullCondition() throws RecognitionException { return gIdentifiersParser.nullCondition(); }
    public HiveParser_SelectClauseParser.hintName_return hintName() throws RecognitionException { return gSelectClauseParser.hintName(); }
    public HiveParser_FromClauseParser.tableName_return tableName() throws RecognitionException { return gFromClauseParser.tableName(); }
    public HiveParser_IdentifiersParser.precedenceBitwiseXorExpression_return precedenceBitwiseXorExpression() throws RecognitionException { return gIdentifiersParser.precedenceBitwiseXorExpression(); }
    public HiveParser_IdentifiersParser.precedenceAndExpression_return precedenceAndExpression() throws RecognitionException { return gIdentifiersParser.precedenceAndExpression(); }
    public HiveParser_SelectClauseParser.hintArgs_return hintArgs() throws RecognitionException { return gSelectClauseParser.hintArgs(); }
    public HiveParser_FromClauseParser.valuesClause_return valuesClause() throws RecognitionException { return gFromClauseParser.valuesClause(); }
    public HiveParser_SelectClauseParser.window_value_expression_return window_value_expression() throws RecognitionException { return gSelectClauseParser.window_value_expression(); }
    public HiveParser_IdentifiersParser.precedenceOrExpression_return precedenceOrExpression() throws RecognitionException { return gIdentifiersParser.precedenceOrExpression(); }
    public HiveParser_IdentifiersParser.precedenceUnarySuffixExpression_return precedenceUnarySuffixExpression() throws RecognitionException { return gIdentifiersParser.precedenceUnarySuffixExpression(); }
    public HiveParser_FromClauseParser.uniqueJoinToken_return uniqueJoinToken() throws RecognitionException { return gFromClauseParser.uniqueJoinToken(); }
    public HiveParser_FromClauseParser.partitionTableFunctionSource_return partitionTableFunctionSource() throws RecognitionException { return gFromClauseParser.partitionTableFunctionSource(); }
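    // The delegate methods above simply forward to the imported sub-grammar parsers
    // (gSelectClauseParser, gFromClauseParser, gIdentifiersParser), so rules such as whereClause()
    // or selectClause() can be called on this parser even though their code is generated elsewhere.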


    protected DFA10 dfa10 = new DFA10(this);
    protected DFA193 dfa193 = new DFA193(this);
    static final String DFA10_eotS =
        "\u00b4\uffff";
    static final String DFA10_eofS =
        "\u00b4\uffff";
    static final String DFA10_minS =
        "\1\37\1\110\1\uffff\1\110\4\uffff\1\72\2\uffff\2\110\2\32\1\uffff"+
        "\1\150\14\uffff\1\166\37\uffff\7\12\1\uffff\3\12\3\uffff\10\12\1"+
        "\uffff\3\12\135\uffff";
    static final String DFA10_maxS =
        "\1\u010a\1\u0112\1\uffff\1\u0112\4\uffff\1\u00fb\2\uffff\2\u00f0"+
        "\2\u0117\1\uffff\1\u00f0\14\uffff\1\u009f\37\uffff\7\u011b\1\uffff"+
        "\3\u011b\3\uffff\1\u00ae\7\u011b\1\uffff\3\u011b\135\uffff";
    static final String DFA10_acceptS =
        "\2\uffff\1\2\1\uffff\1\6\1\7\1\10\2\uffff\1\12\1\23\4\uffff\1\42"+
        "\1\uffff\1\17\1\30\1\1\1\uffff\1\4\1\uffff\1\13\1\uffff\1\15\1\5"+
        "\1\14\1\20\1\uffff\1\31\1\3\1\uffff\1\21\1\11\12\uffff\1\34\1\35"+
        "\1\36\1\37\1\43\5\uffff\1\24\1\26\1\uffff\1\25\1\27\10\uffff\1\32"+
        "\3\uffff\1\40\12\uffff\1\33\3\uffff\1\41\3\uffff\1\16\3\uffff\1"+
        "\22\2\uffff\1\32\1\uffff\1\32\1\uffff\1\32\1\uffff\1\32\1\uffff"+
        "\1\32\1\uffff\1\32\1\uffff\1\32\1\uffff\1\32\1\uffff\1\32\1\uffff"+
        "\1\32\1\uffff\1\32\1\uffff\1\32\1\uffff\1\32\1\uffff\1\32\1\uffff"+
        "\1\32\1\uffff\1\32\1\uffff\1\32\1\uffff\1\32\1\uffff\1\32\1\uffff"+
        "\1\32\4\uffff\1\33\1\uffff\1\33\1\uffff\1\33\1\uffff\1\33\1\uffff"+
        "\1\33\1\uffff\1\33\1\uffff\1\33\1\uffff\1\33\1\uffff\1\33\1\uffff"+
        "\1\33\1\uffff\1\33\1\uffff\1\33\1\uffff\1\33\1\uffff\1\33\1\uffff"+
        "\1\33\1\uffff\1\33\1\uffff\1\33\1\uffff\1\33\1\uffff\1\33\1\uffff"+
        "\1\33";
    static final String DFA10_specialS =
        "\u00b4\uffff}>";
    static final String[] DFA10_transitionS = {
            "\1\5\1\12\41\uffff\1\1\21\uffff\2\6\6\uffff\1\3\33\uffff\1\15"+
            "\42\uffff\1\13\11\uffff\1\11\54\uffff\1\16\16\uffff\1\17\2\uffff"+
            "\1\10\32\uffff\1\4\6\uffff\1\14\3\uffff\1\2",
            "\1\23\37\uffff\1\25\15\uffff\1\31\13\uffff\1\21\54\uffff\1"+
            "\27\46\uffff\1\22\4\uffff\1\23\24\uffff\1\25\3\uffff\1\20\35"+
            "\uffff\1\27",
            "",
            "\1\37\55\uffff\1\41\13\uffff\1\34\123\uffff\1\36\4\uffff\1"+
            "\37\24\uffff\1\32\3\uffff\1\35\35\uffff\1\33",
            "",
            "",
            "",
            "",
            "\1\42\2\uffff\1\42\2\uffff\1\42\1\uffff\1\42\2\uffff\1\61\3"+
            "\uffff\1\42\51\uffff\1\42\3\uffff\1\42\1\55\11\uffff\2\42\30"+
            "\uffff\1\42\36\uffff\1\42\5\uffff\1\57\24\uffff\1\56\1\60\4"+
            "\uffff\1\42\23\uffff\2\42\1\uffff\1\42\7\uffff\1\42",
            "",
            "",
            "\1\70\u0092\uffff\1\70\24\uffff\1\67",
            "\1\73\u0092\uffff\1\73\24\uffff\1\72",
            "\4\110\1\75\1\76\1\110\1\uffff\17\110\2\uffff\1\110\1\uffff"+
            "\4\110\1\uffff\6\110\1\uffff\1\110\1\100\1\uffff\1\110\1\uffff"+
            "\2\110\1\uffff\10\110\1\107\7\110\1\uffff\2\110\1\101\1\110"+
            "\1\uffff\1\110\1\uffff\1\110\1\uffff\4\110\1\uffff\10\110\1"+
            "\uffff\3\110\1\uffff\1\110\1\uffff\4\110\1\uffff\2\110\1\uffff"+
            "\3\110\1\102\5\110\1\106\6\110\1\uffff\4\110\1\uffff\6\110\1"+
            "\103\3\110\2\uffff\3\110\1\uffff\3\110\1\uffff\4\110\1\uffff"+
            "\1\110\1\uffff\5\110\1\uffff\2\110\1\uffff\5\110\2\uffff\14"+
            "\110\1\uffff\20\110\1\104\7\110\1\105\14\110\1\uffff\3\110\1"+
            "\uffff\5\110\1\uffff\4\110\1\uffff\3\110\1\uffff\3\110\1\77"+
            "\7\110\1\uffff\1\110\2\uffff\1\110\1\uffff\1\110",
            "\4\127\1\114\1\115\1\127\1\uffff\17\127\2\uffff\1\127\1\uffff"+
            "\4\127\1\uffff\6\127\1\uffff\1\127\1\117\1\uffff\1\127\1\uffff"+
            "\2\127\1\uffff\10\127\1\126\7\127\1\uffff\2\127\1\120\1\127"+
            "\1\uffff\1\127\1\uffff\1\127\1\uffff\4\127\1\uffff\10\127\1"+
            "\uffff\3\127\1\uffff\1\127\1\uffff\1\127\1\113\2\127\1\uffff"+
            "\2\127\1\uffff\3\127\1\121\5\127\1\125\6\127\1\uffff\4\127\1"+
            "\uffff\6\127\1\122\3\127\2\uffff\3\127\1\uffff\3\127\1\uffff"+
            "\4\127\1\uffff\1\127\1\uffff\5\127\1\uffff\2\127\1\uffff\5\127"+
            "\2\uffff\14\127\1\uffff\20\127\1\123\7\127\1\124\14\127\1\uffff"+
            "\3\127\1\uffff\5\127\1\uffff\4\127\1\uffff\3\127\1\uffff\3\127"+
            "\1\116\7\127\1\uffff\1\127\2\uffff\1\127\1\uffff\1\127",
            "",
            "\1\25\15\uffff\1\31\50\uffff\1\133\120\uffff\1\25",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "\1\41\50\uffff\1\137",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "\1\142\u00a2\uffff\1\104\113\uffff\1\144\41\uffff\1\104",
            "\1\146\u00a2\uffff\1\104\113\uffff\1\150\41\uffff\1\104",
            "\1\152\u00a2\uffff\1\104\113\uffff\1\154\41\uffff\1\104",
            "\1\156\u00a2\uffff\1\104\113\uffff\1\160\41\uffff\1\104",
            "\1\162\u00a2\uffff\1\104\113\uffff\1\164\41\uffff\1\104",
            "\1\166\u00a2\uffff\1\104\113\uffff\1\170\41\uffff\1\104",
            "\1\172\u00a2\uffff\1\104\113\uffff\1\174\41\uffff\1\104",
            "",
            "\1\176\u00a2\uffff\1\104\113\uffff\1\u0080\41\uffff\1\104",
            "\1\u0082\u00a2\uffff\1\104\113\uffff\1\u0084\41\uffff\1\104",
            "\1\u0086\u00a2\uffff\1\104\113\uffff\1\u0088\41\uffff\1\104",
            "",
            "",
            "",
            "\1\127\151\uffff\1\127\71\uffff\1\123",
            "\1\u008d\151\uffff\1\u008f\70\uffff\1\123\155\uffff\1\123",
            "\1\u0091\151\uffff\1\u0093\70\uffff\1\123\155\uffff\1\123",
            "\1\u0095\151\uffff\1\u0097\70\uffff\1\123\155\uffff\1\123",
            "\1\u0099\151\uffff\1\u009b\70\uffff\1\123\155\uffff\1\123",
            "\1\u009d\151\uffff\1\u009f\70\uffff\1\123\155\uffff\1\123",
            "\1\u00a1\151\uffff\1\u00a3\70\uffff\1\123\155\uffff\1\123",
            "\1\u00a5\151\uffff\1\u00a7\70\uffff\1\123\155\uffff\1\123",
            "",
            "\1\u00a9\151\uffff\1\u00ab\70\uffff\1\123\155\uffff\1\123",
            "\1\u00ad\151\uffff\1\u00af\70\uffff\1\123\155\uffff\1\123",
            "\1\u00b1\151\uffff\1\u00b3\70\uffff\1\123\155\uffff\1\123",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            "",
            ""
    };

    static final short[] DFA10_eot = DFA.unpackEncodedString(DFA10_eotS);
    static final short[] DFA10_eof = DFA.unpackEncodedString(DFA10_eofS);
    static final char[] DFA10_min = DFA.unpackEncodedStringToUnsignedChars(DFA10_minS);
    static final char[] DFA10_max = DFA.unpackEncodedStringToUnsignedChars(DFA10_maxS);
    static final short[] DFA10_accept = DFA.unpackEncodedString(DFA10_acceptS);
    static final short[] DFA10_special = DFA.unpackEncodedString(DFA10_specialS);
    static final short[][] DFA10_transition;

    static {
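        // Unpack the encoded DFA10 tables declared above; each packed transition row is decoded
        // into its own short[] so the DFA can index states directly at parse time.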
        int numStates = DFA10_transitionS.length;
        DFA10_transition = new short[numStates][];
        for (int i=0; i<numStates; i++) {
            DFA10_transition[i] = DFA.unpackEncodedString(DFA10_transitionS[i]);
        }
    }



