// org.apache.hadoop.hive.ql.parse.MacroSemanticAnalyzer
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse;
import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_IFEXISTS;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Stack;
import java.util.LinkedHashSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.FunctionUtils;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
import org.apache.hadoop.hive.ql.plan.CreateMacroDesc;
import org.apache.hadoop.hive.ql.plan.DropMacroDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.FunctionWork;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
/**
* MacroSemanticAnalyzer.
*
*/
/**
 * MacroSemanticAnalyzer.
 *
 * Performs semantic analysis for CREATE TEMPORARY MACRO and
 * DROP TEMPORARY MACRO statements, producing a {@link FunctionWork}
 * task carrying either a {@link CreateMacroDesc} or {@link DropMacroDesc}.
 */
public class MacroSemanticAnalyzer extends BaseSemanticAnalyzer {

  private static final Logger LOG = LoggerFactory.getLogger(MacroSemanticAnalyzer.class);

  public MacroSemanticAnalyzer(QueryState queryState) throws SemanticException {
    super(queryState);
  }

  @Override
  public void analyzeInternal(ASTNode ast) throws SemanticException {
    // A single root token is either CREATEMACRO or DROPMACRO, never both.
    if (ast.getToken().getType() == HiveParser.TOK_CREATEMACRO) {
      if (LOG.isDebugEnabled()) {
        // Guarded: ast.dump() is expensive and only needed at debug level.
        LOG.debug("Analyzing create macro {}", ast.dump());
      }
      analyzeCreateMacro(ast);
    } else if (ast.getToken().getType() == HiveParser.TOK_DROPMACRO) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Analyzing drop macro {}", ast.dump());
      }
      analyzeDropMacro(ast);
    }
  }

  /**
   * Analyzes CREATE TEMPORARY MACRO: validates the (unqualified) macro name,
   * checks that the declared parameters and the columns referenced in the macro
   * body match exactly, compiles the body expression, and queues the creation task.
   *
   * @param ast root of the TOK_CREATEMACRO subtree; child 0 is the name, child 1
   *            the parameter list, child 2 (if parameters exist) the body expression
   * @throws SemanticException if the name is qualified, the declared and used
   *            column sets differ, or a parameter name is duplicated
   */
  @SuppressWarnings("unchecked")
  private void analyzeCreateMacro(ASTNode ast) throws SemanticException {
    String functionName = ast.getChild(0).getText();

    // Temp macros are not allowed to have qualified names.
    if (FunctionUtils.isQualifiedFunctionName(functionName)) {
      throw new SemanticException("Temporary macro cannot be created with a qualified name.");
    }

    List<FieldSchema> arguments =
        BaseSemanticAnalyzer.getColumns((ASTNode) ast.getChild(1), true);
    boolean isNoArgumentMacro = arguments.isEmpty();

    RowResolver rowResolver = new RowResolver();
    ArrayList<String> macroColNames = new ArrayList<String>(arguments.size());
    ArrayList<TypeInfo> macroColTypes = new ArrayList<TypeInfo>(arguments.size());
    final Set<String> actualColumnNames = new HashSet<String>();

    if (!isNoArgumentMacro) {
      /*
       * Walk down expression to see which arguments are actually used.
       */
      Node expression = (Node) ast.getChild(2);
      PreOrderWalker walker = new PreOrderWalker(new Dispatcher() {
        @Override
        public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
            throws SemanticException {
          if (nd instanceof ASTNode) {
            ASTNode node = (ASTNode) nd;
            if (node.getType() == HiveParser.TOK_TABLE_OR_COL) {
              actualColumnNames.add(node.getChild(0).getText());
            }
          }
          return null;
        }
      });
      walker.startWalking(Collections.singletonList(expression), null);
    }

    // Register each declared parameter with the row resolver so the body
    // expression can reference it by name during compilation below.
    for (FieldSchema argument : arguments) {
      TypeInfo colType = TypeInfoUtils.getTypeInfoFromTypeString(argument.getType());
      rowResolver.put("", argument.getName(),
          new ColumnInfo(argument.getName(), colType, "", false));
      macroColNames.add(argument.getName());
      macroColTypes.add(colType);
    }

    // LinkedHashSet keeps declaration order so error messages list parameters
    // in the order the user wrote them.
    Set<String> expectedColumnNames = new LinkedHashSet<String>(macroColNames);
    if (!expectedColumnNames.equals(actualColumnNames)) {
      throw new SemanticException("Expected columns " + expectedColumnNames + " but found "
          + actualColumnNames);
    }
    if (expectedColumnNames.size() != macroColNames.size()) {
      throw new SemanticException("At least one parameter name was used more than once "
          + macroColNames);
    }

    // Use the CBO-aware planner when enabled so the body is compiled the same
    // way as regular query expressions.
    SemanticAnalyzer sa = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED) ?
        new CalcitePlanner(queryState) : new SemanticAnalyzer(queryState);

    // With no parameters the body expression is child 1; otherwise child 1 is
    // the parameter list and the body is child 2.
    ExprNodeDesc body;
    if (isNoArgumentMacro) {
      body = sa.genExprNodeDesc((ASTNode) ast.getChild(1), rowResolver);
    } else {
      body = sa.genExprNodeDesc((ASTNode) ast.getChild(2), rowResolver);
    }

    CreateMacroDesc desc = new CreateMacroDesc(functionName, macroColNames, macroColTypes, body);
    rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf));
    addEntities();
  }

  /**
   * Analyzes DROP TEMPORARY MACRO: validates the (unqualified) macro name,
   * optionally verifies the macro exists, and queues the drop task.
   *
   * @param ast root of the TOK_DROPMACRO subtree; child 0 is the macro name,
   *            with an optional TOK_IFEXISTS child
   * @throws SemanticException if the name is qualified, or the macro does not
   *            exist and neither IF EXISTS nor DROPIGNORESNONEXISTENT applies
   */
  @SuppressWarnings("unchecked")
  private void analyzeDropMacro(ASTNode ast) throws SemanticException {
    String functionName = ast.getChild(0).getText();
    boolean ifExists = (ast.getFirstChildWithType(TOK_IFEXISTS) != null);
    // we want to signal an error if the function doesn't exist and we're
    // configured not to ignore this
    boolean throwException =
        !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);

    // Temp macros are not allowed to have qualified names.
    if (FunctionUtils.isQualifiedFunctionName(functionName)) {
      throw new SemanticException("Temporary macro name cannot be a qualified name.");
    }

    if (throwException && FunctionRegistry.getFunctionInfo(functionName) == null) {
      throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(functionName));
    }

    DropMacroDesc desc = new DropMacroDesc(functionName);
    rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf));
    addEntities();
  }

  /**
   * Adds a DDL write entity on the default database to the outputs set.
   * This restricts macro creation/removal to privileged users via the
   * authorization layer.
   */
  private void addEntities() throws SemanticException {
    Database database = getDatabase(MetaStoreUtils.DEFAULT_DATABASE_NAME);
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
  }
}