org.apache.hadoop.hive.ql.exec.TaskFactory (hive-exec)
Hive is a data warehouse infrastructure built on top of Hadoop; see
http://wiki.apache.org/hadoop/Hive for details.
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.io.merge.BlockMergeTask;
import org.apache.hadoop.hive.ql.io.merge.MergeWork;
import org.apache.hadoop.hive.ql.io.rcfile.stats.PartialScanTask;
import org.apache.hadoop.hive.ql.io.rcfile.stats.PartialScanWork;
import org.apache.hadoop.hive.ql.plan.ColumnStatsWork;
import org.apache.hadoop.hive.ql.plan.ConditionalWork;
import org.apache.hadoop.hive.ql.plan.CopyWork;
import org.apache.hadoop.hive.ql.plan.DDLWork;
import org.apache.hadoop.hive.ql.plan.DependencyCollectionWork;
import org.apache.hadoop.hive.ql.plan.ExplainWork;
import org.apache.hadoop.hive.ql.plan.FetchWork;
import org.apache.hadoop.hive.ql.plan.FunctionWork;
import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.MoveWork;
import org.apache.hadoop.hive.ql.plan.StatsWork;
/**
 * TaskFactory implementation. Maps each work (plan) class to the Task
 * subclass that executes it, and instantiates tasks with per-thread,
 * monotonically increasing "Stage-N" ids.
 **/
public final class TaskFactory {
  /**
   * taskTuple: pairs a work (plan) class with the Task class that runs it.
   *
   * @param <T> the work type handled by the task
   */
  public static final class taskTuple<T extends Serializable> {
    public Class<T> workClass;
    public Class<? extends Task<T>> taskClass;

    public taskTuple(Class<T> workClass, Class<? extends Task<T>> taskClass) {
      this.workClass = workClass;
      this.taskClass = taskClass;
    }
  }
  public static ArrayList<taskTuple<? extends Serializable>> taskvec;

  static {
    taskvec = new ArrayList<taskTuple<? extends Serializable>>();
    taskvec.add(new taskTuple<MoveWork>(MoveWork.class, MoveTask.class));
    taskvec.add(new taskTuple<FetchWork>(FetchWork.class, FetchTask.class));
    taskvec.add(new taskTuple<CopyWork>(CopyWork.class, CopyTask.class));
    taskvec.add(new taskTuple<DDLWork>(DDLWork.class, DDLTask.class));
    taskvec.add(new taskTuple<FunctionWork>(FunctionWork.class, FunctionTask.class));
    taskvec.add(new taskTuple<ExplainWork>(ExplainWork.class, ExplainTask.class));
    taskvec.add(new taskTuple<ConditionalWork>(ConditionalWork.class, ConditionalTask.class));
    taskvec.add(new taskTuple<MapredWork>(MapredWork.class, MapRedTask.class));
    taskvec.add(new taskTuple<MapredLocalWork>(MapredLocalWork.class, MapredLocalTask.class));
    taskvec.add(new taskTuple<StatsWork>(StatsWork.class, StatsTask.class));
    taskvec.add(new taskTuple<ColumnStatsWork>(ColumnStatsWork.class, ColumnStatsTask.class));
    taskvec.add(new taskTuple<MergeWork>(MergeWork.class, BlockMergeTask.class));
    taskvec.add(new taskTuple<DependencyCollectionWork>(DependencyCollectionWork.class,
        DependencyCollectionTask.class));
    taskvec.add(new taskTuple<PartialScanWork>(PartialScanWork.class, PartialScanTask.class));
  }
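  // Note: the table above is fixed at class-load time; supporting a new work
  // type means adding its pair here. Illustrative only -- MyWork and MyTask are
  // hypothetical, not classes in this codebase:
  //   taskvec.add(new taskTuple<MyWork>(MyWork.class, MyTask.class));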
  // Per-thread counter used to assign unique "Stage-N" ids to tasks.
  private static ThreadLocal<Integer> tid = new ThreadLocal<Integer>() {
    @Override
    protected synchronized Integer initialValue() {
      return Integer.valueOf(0);
    }
  };

  public static int getAndIncrementId() {
    int curValue = tid.get().intValue();
    tid.set(Integer.valueOf(curValue + 1));
    return curValue;
  }

  public static void resetId() {
    tid.set(Integer.valueOf(0));
  }
  @SuppressWarnings("unchecked")
  public static <T extends Serializable> Task<T> get(Class<T> workClass,
      HiveConf conf) {
    for (taskTuple<? extends Serializable> t : taskvec) {
      if (t.workClass == workClass) {
        try {
          Task<T> ret = (Task<T>) t.taskClass.newInstance();
          ret.setId("Stage-" + Integer.toString(getAndIncrementId()));
          return ret;
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
      }
    }

    throw new RuntimeException("No task for work class " + workClass.getName());
  }
  @SuppressWarnings("unchecked")
  public static <T extends Serializable> Task<T> get(T work, HiveConf conf,
      Task<? extends Serializable>... tasklist) {
    Task<T> ret = get((Class<T>) work.getClass(), conf);
    ret.setWork(work);
    if (tasklist.length == 0) {
      return (ret);
    }

    ArrayList<Task<? extends Serializable>> clist =
        new ArrayList<Task<? extends Serializable>>();
    for (Task<? extends Serializable> tsk : tasklist) {
      clist.add(tsk);
    }
    ret.setChildTasks(clist);
    return (ret);
  }
  @SuppressWarnings("unchecked")
  public static <T extends Serializable> Task<T> getAndMakeChild(T work,
      HiveConf conf, Task<? extends Serializable>... tasklist) {
    Task<T> ret = get((Class<T>) work.getClass(), conf);
    ret.setWork(work);
    if (tasklist.length == 0) {
      return (ret);
    }

    makeChild(ret, tasklist);
    return (ret);
  }
  public static void makeChild(Task<?> ret,
      Task<? extends Serializable>... tasklist) {
    // Add the new task as a child of each of the passed-in tasks.
    for (Task<? extends Serializable> tsk : tasklist) {
      List<Task<? extends Serializable>> children = tsk.getChildTasks();
      if (children == null) {
        children = new ArrayList<Task<? extends Serializable>>();
      }
      children.add(ret);
      tsk.setChildTasks(children);
    }
  }
  private TaskFactory() {
    // prevent instantiation
  }
}
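How a caller drives this factory, as a minimal sketch (not part of the Hive source): the work object's class selects the Task subclass from taskvec, and getAndMakeChild wires the parent/child edges of the stage DAG. MoveWork's no-arg constructor is assumed here for brevity; real compiler code builds fully populated work objects.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.plan.MoveWork;

public class TaskFactoryUsageSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();

    // Reflectively instantiates MoveTask (looked up via MoveWork in taskvec),
    // attaches the work object, and assigns "Stage-0" on this thread.
    Task<MoveWork> parent = TaskFactory.get(new MoveWork(), conf);

    // Creates a second task ("Stage-1") and registers it as a child of the
    // first, so it runs only after the parent completes.
    Task<MoveWork> child = TaskFactory.getAndMakeChild(new MoveWork(), conf, parent);

    System.out.println(parent.getId() + " -> " + child.getId());
  }
}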