All downloads are free. Search and download functionality uses the official Maven repository.
Please wait; this can take a few minutes ...
Many resources are needed to download a project. Please understand that we have to cover our server costs. Thank you in advance.
Project price only 1 $
You can buy this project and download/modify it how often you want.
org.apache.hadoop.hive.ql.parse.ParseContext Maven / Gradle / Ivy
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse;
import java.io.Serializable;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.QueryProperties;
import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
import org.apache.hadoop.hive.ql.exec.FetchTask;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.exec.GroupByOperator;
import org.apache.hadoop.hive.ql.exec.JoinOperator;
import org.apache.hadoop.hive.ql.exec.ListSinkOperator;
import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.hooks.LineageInfo;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;
import org.apache.hadoop.hive.ql.plan.LoadFileDesc;
import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
/**
* Parse Context: The current parse context. This is passed to the optimizer
* which then transforms the operator tree using the parse context. All the
* optimizations are performed sequentially and then the new parse context
* populated. Note that since the parse context contains the operator tree, it
* can be easily retrieved by the next optimization step or finally for task
* generation after the plan has been completely optimized.
*
**/
public class ParseContext {
private QB qb;
private ASTNode ast;
private HashMap opToPartPruner;
private HashMap opToPartList;
private HashMap opToSamplePruner;
private Map> opToPartToSkewedPruner;
private HashMap> topOps;
private HashMap> topSelOps;
private LinkedHashMap, OpParseContext> opParseCtx;
private Map joinContext;
private Map mapJoinContext;
private Map smbMapJoinContext;
private HashMap topToTable;
private Map fsopToTable;
private List reduceSinkOperatorsAddedByEnforceBucketingSorting;
private HashMap> topToProps;
private HashMap nameToSplitSample;
private List loadTableWork;
private List loadFileWork;
private Context ctx;
private HiveConf conf;
private HashMap idToTableNameMap;
private int destTableId;
private UnionProcContext uCtx;
private List> listMapJoinOpsNoReducer; // list of map join
// operators with no
// reducer
private Map> groupOpToInputTables;
private Map prunedPartitions;
private Map viewAliasToInput;
/**
* The lineage information.
*/
private LineageInfo lInfo;
private GlobalLimitCtx globalLimitCtx;
private HashSet semanticInputs;
private List> rootTasks;
private FetchTask fetchTask;
private QueryProperties queryProperties;
private TableDesc fetchTabledesc;
private Operator> fetchSource;
private ListSinkOperator fetchSink;
public ParseContext() {
}
/**
* @param conf
* @param qb
* current QB
* @param ast
* current parse tree
* @param opToPartPruner
* map from table scan operator to partition pruner
* @param opToPartList
* @param topOps
* list of operators for the top query
* @param topSelOps
* list of operators for the selects introduced for column pruning
* @param opParseCtx
* operator parse context - contains a mapping from operator to
* operator parse state (row resolver etc.)
* @param joinContext
* context needed join processing (map join specifically)
* @param topToTable
* the top tables being processed
* @param loadTableWork
* list of destination tables being loaded
* @param loadFileWork
* list of destination files being loaded
* @param ctx
* parse context
* @param idToTableNameMap
* @param uCtx
* @param destTableId
* @param listMapJoinOpsNoReducer
* list of map join operators with no reducer
* @param groupOpToInputTables
* @param prunedPartitions
* @param opToSamplePruner
* operator to sample pruner map
* @param globalLimitCtx
* @param nameToSplitSample
* @param rootTasks
*/
public ParseContext(
HiveConf conf,
QB qb,
ASTNode ast,
HashMap opToPartPruner,
HashMap opToPartList,
HashMap> topOps,
HashMap> topSelOps,
LinkedHashMap, OpParseContext> opParseCtx,
Map joinContext,
Map smbMapJoinContext,
HashMap topToTable,
HashMap> topToProps,
Map fsopToTable,
List loadTableWork, List loadFileWork,
Context ctx, HashMap idToTableNameMap, int destTableId,
UnionProcContext uCtx, List> listMapJoinOpsNoReducer,
Map> groupOpToInputTables,
Map prunedPartitions,
HashMap opToSamplePruner,
GlobalLimitCtx globalLimitCtx,
HashMap nameToSplitSample,
HashSet semanticInputs, List> rootTasks,
Map> opToPartToSkewedPruner,
Map viewAliasToInput,
List reduceSinkOperatorsAddedByEnforceBucketingSorting,
QueryProperties queryProperties) {
this.conf = conf;
this.qb = qb;
this.ast = ast;
this.opToPartPruner = opToPartPruner;
this.opToPartList = opToPartList;
this.joinContext = joinContext;
this.smbMapJoinContext = smbMapJoinContext;
this.topToTable = topToTable;
this.fsopToTable = fsopToTable;
this.topToProps = topToProps;
this.loadFileWork = loadFileWork;
this.loadTableWork = loadTableWork;
this.opParseCtx = opParseCtx;
this.topOps = topOps;
this.topSelOps = topSelOps;
this.ctx = ctx;
this.idToTableNameMap = idToTableNameMap;
this.destTableId = destTableId;
this.uCtx = uCtx;
this.listMapJoinOpsNoReducer = listMapJoinOpsNoReducer;
this.groupOpToInputTables = groupOpToInputTables;
this.prunedPartitions = prunedPartitions;
this.opToSamplePruner = opToSamplePruner;
this.nameToSplitSample = nameToSplitSample;
this.globalLimitCtx = globalLimitCtx;
this.semanticInputs = semanticInputs;
this.rootTasks = rootTasks;
this.opToPartToSkewedPruner = opToPartToSkewedPruner;
this.viewAliasToInput = viewAliasToInput;
this.reduceSinkOperatorsAddedByEnforceBucketingSorting =
reduceSinkOperatorsAddedByEnforceBucketingSorting;
this.queryProperties = queryProperties;
}
/**
* @return the qb
*/
public QB getQB() {
return qb;
}
/**
* @param qb
* the qb to set
*/
public void setQB(QB qb) {
this.qb = qb;
}
/**
* @return the context
*/
public Context getContext() {
return ctx;
}
/**
* @param ctx
* the context to set
*/
public void setContext(Context ctx) {
this.ctx = ctx;
}
/**
* @return the hive conf
*/
public HiveConf getConf() {
return conf;
}
/**
* @param conf
* the conf to set
*/
public void setConf(HiveConf conf) {
this.conf = conf;
}
/**
* @return the ast
*/
public ASTNode getParseTree() {
return ast;
}
/**
* @param ast
* the parsetree to set
*/
public void setParseTree(ASTNode ast) {
this.ast = ast;
}
/**
* @return the opToPartPruner
*/
public HashMap getOpToPartPruner() {
return opToPartPruner;
}
/**
* @param opToPartPruner
* the opToPartPruner to set
*/
public void setOpToPartPruner(
HashMap opToPartPruner) {
this.opToPartPruner = opToPartPruner;
}
public HashMap getOpToPartList() {
return opToPartList;
}
/**
* @return the topToTable
*/
public HashMap getTopToTable() {
return topToTable;
}
/**
* @param topToTable
* the topToTable to set
*/
public void setTopToTable(HashMap topToTable) {
this.topToTable = topToTable;
}
public Map getFsopToTable() {
return fsopToTable;
}
public void setFsopToTable(Map fsopToTable) {
this.fsopToTable = fsopToTable;
}
public List getReduceSinkOperatorsAddedByEnforceBucketingSorting() {
return reduceSinkOperatorsAddedByEnforceBucketingSorting;
}
public void setReduceSinkOperatorsAddedByEnforceBucketingSorting(
List reduceSinkOperatorsAddedByEnforceBucketingSorting) {
this.reduceSinkOperatorsAddedByEnforceBucketingSorting =
reduceSinkOperatorsAddedByEnforceBucketingSorting;
}
/**
* @return the topToProps
*/
public HashMap> getTopToProps() {
return topToProps;
}
/**
* @param topToProps
* the topToProps to set
*/
public void setTopToProps(HashMap> topToProps) {
this.topToProps = topToProps;
}
/**
* @return the topOps
*/
public HashMap> getTopOps() {
return topOps;
}
/**
* @param topOps
* the topOps to set
*/
public void setTopOps(HashMap> topOps) {
this.topOps = topOps;
}
/**
* @return the topSelOps
*/
public HashMap> getTopSelOps() {
return topSelOps;
}
/**
* @param topSelOps
* the topSelOps to set
*/
public void setTopSelOps(
HashMap> topSelOps) {
this.topSelOps = topSelOps;
}
/**
* @return the opParseCtx
*/
public LinkedHashMap, OpParseContext> getOpParseCtx() {
return opParseCtx;
}
/**
* Remove the OpParseContext of a specific operator op
* @param op
* @return
*/
public OpParseContext removeOpParseCtx(Operator extends OperatorDesc> op) {
return opParseCtx.remove(op);
}
/**
* Update the OpParseContext of operator op to newOpParseContext.
* If op is not in opParseCtx, a new entry will be added into opParseCtx.
* The key is op, and the value is newOpParseContext.
* @param op
* @param newOpParseContext
*/
public void updateOpParseCtx(Operator extends OperatorDesc> op,
OpParseContext newOpParseContext) {
opParseCtx.put(op, newOpParseContext);
}
/**
* @param opParseCtx
* the opParseCtx to set
*/
public void setOpParseCtx(
LinkedHashMap, OpParseContext> opParseCtx) {
this.opParseCtx = opParseCtx;
}
public HashMap getNameToSplitSample() {
return nameToSplitSample;
}
public void setNameToSplitSample(HashMap nameToSplitSample) {
this.nameToSplitSample = nameToSplitSample;
}
/**
* @return the loadTableWork
*/
public List getLoadTableWork() {
return loadTableWork;
}
/**
* @param loadTableWork
* the loadTableWork to set
*/
public void setLoadTableWork(List loadTableWork) {
this.loadTableWork = loadTableWork;
}
/**
* @return the loadFileWork
*/
public List getLoadFileWork() {
return loadFileWork;
}
/**
* @param loadFileWork
* the loadFileWork to set
*/
public void setLoadFileWork(List loadFileWork) {
this.loadFileWork = loadFileWork;
}
public HashMap getIdToTableNameMap() {
return idToTableNameMap;
}
public void setIdToTableNameMap(HashMap idToTableNameMap) {
this.idToTableNameMap = idToTableNameMap;
}
public int getDestTableId() {
return destTableId;
}
public void setDestTableId(int destTableId) {
this.destTableId = destTableId;
}
public UnionProcContext getUCtx() {
return uCtx;
}
public void setUCtx(UnionProcContext uCtx) {
this.uCtx = uCtx;
}
/**
* @return the joinContext
*/
public Map getJoinContext() {
return joinContext;
}
/**
* @param joinContext
* the joinContext to set
*/
public void setJoinContext(Map joinContext) {
this.joinContext = joinContext;
}
/**
* @return the listMapJoinOpsNoReducer
*/
public List> getListMapJoinOpsNoReducer() {
return listMapJoinOpsNoReducer;
}
/**
* @param listMapJoinOpsNoReducer
* the listMapJoinOpsNoReducer to set
*/
public void setListMapJoinOpsNoReducer(
List> listMapJoinOpsNoReducer) {
this.listMapJoinOpsNoReducer = listMapJoinOpsNoReducer;
}
/**
* @return the opToSamplePruner
*/
public HashMap getOpToSamplePruner() {
return opToSamplePruner;
}
/**
* @param opToSamplePruner
* the opToSamplePruner to set
*/
public void setOpToSamplePruner(
HashMap opToSamplePruner) {
this.opToSamplePruner = opToSamplePruner;
}
/**
* @return the groupOpToInputTables
*/
public Map> getGroupOpToInputTables() {
return groupOpToInputTables;
}
/**
* @param groupOpToInputTables
*/
public void setGroupOpToInputTables(
Map> groupOpToInputTables) {
this.groupOpToInputTables = groupOpToInputTables;
}
/**
* @return pruned partition map
*/
public Map getPrunedPartitions() {
return prunedPartitions;
}
/**
* @param prunedPartitions
*/
public void setPrunedPartitions(
Map prunedPartitions) {
this.prunedPartitions = prunedPartitions;
}
/**
* Sets the lineage information.
*
* @param lInfo The lineage information.
*/
public void setLineageInfo(LineageInfo lInfo) {
this.lInfo = lInfo;
}
/**
* Gets the associated lineage information.
*
* @return LineageInfo
*/
public LineageInfo getLineageInfo() {
return lInfo;
}
public Map getMapJoinContext() {
return mapJoinContext;
}
public void setMapJoinContext(Map mapJoinContext) {
this.mapJoinContext = mapJoinContext;
}
public Map getSmbMapJoinContext() {
return smbMapJoinContext;
}
public void setSmbMapJoinContext(Map smbMapJoinContext) {
this.smbMapJoinContext = smbMapJoinContext;
}
public GlobalLimitCtx getGlobalLimitCtx() {
return globalLimitCtx;
}
public void setGlobalLimitCtx(GlobalLimitCtx globalLimitCtx) {
this.globalLimitCtx = globalLimitCtx;
}
public HashSet getSemanticInputs() {
return semanticInputs;
}
public void replaceRootTask(Task extends Serializable> rootTask,
List extends Task extends Serializable>> tasks) {
this.rootTasks.remove(rootTask);
this.rootTasks.addAll(tasks);
}
public FetchTask getFetchTask() {
return fetchTask;
}
public void setFetchTask(FetchTask fetchTask) {
this.fetchTask = fetchTask;
}
public PrunedPartitionList getPrunedPartitions(String alias, TableScanOperator ts)
throws SemanticException {
PrunedPartitionList partsList = opToPartList.get(ts);
if (partsList == null) {
partsList = PartitionPruner.prune(ts, this, alias);
opToPartList.put(ts, partsList);
}
return partsList;
}
/**
* @return the opToPartToSkewedPruner
*/
public Map> getOpToPartToSkewedPruner() {
return opToPartToSkewedPruner;
}
/**
* @param opToPartToSkewedPruner
* the opToSkewedPruner to set
*/
public void setOpPartToSkewedPruner(
HashMap> opToPartToSkewedPruner) {
this.opToPartToSkewedPruner = opToPartToSkewedPruner;
}
public Map getViewAliasToInput() {
return viewAliasToInput;
}
public QueryProperties getQueryProperties() {
return queryProperties;
}
public void setQueryProperties(QueryProperties queryProperties) {
this.queryProperties = queryProperties;
}
public TableDesc getFetchTabledesc() {
return fetchTabledesc;
}
public void setFetchTabledesc(TableDesc fetchTabledesc) {
this.fetchTabledesc = fetchTabledesc;
}
public Operator> getFetchSource() {
return fetchSource;
}
public void setFetchSource(Operator> fetchSource) {
this.fetchSource = fetchSource;
}
public ListSinkOperator getFetchSink() {
return fetchSink;
}
public void setFetchSink(ListSinkOperator fetchSink) {
this.fetchSink = fetchSink;
}
}