
/*-
 * Copyright (C) 2011, 2018 Oracle and/or its affiliates. All rights reserved.
 *
 * This file was distributed by Oracle as part of a version of Oracle NoSQL
 * Database made available at:
 *
 * http://www.oracle.com/technetwork/database/database-technologies/nosqldb/downloads/index.html
 *
 * Please see the LICENSE file included in the top-level directory of the
 * appropriate version of Oracle NoSQL Database for a copy of the license and
 * additional information.
 */

package oracle.kv.hadoop.hive.table;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;

/**
 * Concrete implementation of Hadoop TaskAttemptContext interface. When
 * initializing a YARN based (MapReduce version 2) RecordReader, an
 * instance of TaskAttemptContext must be specified. This class is intended
 * to satisfy that requirement.
 *
 * Note that the RecordReader provided for Hadoop integration is YARN based,
 * whereas the Hive infrastructure requires a RecordReader based on version 1
 * MapReduce. To address this incompatibility when integrating with HIVE,
 * a version 1 based RecordReader is used, but wraps and delegates to a
 * version 2 RecordReader; thus, the need for this class. Currently, the
 * methods of this class are "stubbed out" and/or return default values;
 * which seems to be acceptable for the Hive integration.
 */
public class TableTaskAttemptContext implements TaskAttemptContext {

    private Job job;
    private String currentStatus = "UNKNOWN";

    @SuppressWarnings("deprecation")
    public TableTaskAttemptContext(Configuration jobConf) throws IOException {
        this.job = new Job(jobConf);
    }

    /*
     * Specified by TaskAttemptContext interface.
     */
    @Override
    public Counter getCounter(Enum counterName) {
        return null;
    }

    @Override
    public Counter getCounter(String groupName, String counterName) {
        return null;
    }

    @Override
    public float getProgress() {
        return 0.0f;
    }

    @Override
    public String getStatus() {
        return currentStatus;
    }

    @Override
    public TaskAttemptID getTaskAttemptID() {
        return null;
    }

    @Override
    public void setStatus(String status) {
        currentStatus = status;
    }

    /**
     * Specified by Progressable interface
     * (TaskAttemptContext extends Progressable).
     */
    @Override
    public void progress() {
        /* No-op */
    }

    /**
     * Specified by JobContext interface
     * (TaskAttemptContext extends JobContext).
     *
     * Note that whereas it would be preferrable to define this class
     * to implement TaskAttemptContext and extend the
     * Job class (so that this class inherits the
     * implementations of all of the methods defined below), that cannot
     * be done. This is because the Job class implements
     * the method JobStatus getStatus, while the
     * TaskAttemptContext interface specifies the method
     * String getStatus. As a result, a sub-class of
     * Job cannot implement TaskAttemptContext;
     * because a method implementation that satisfies the
     * getStatus specification in TaskAttemptContext
     * cannot override the implementation of getStatus
     * provided by Job.
     */
    @Override
    public Path[] getArchiveClassPaths() {
        return job.getArchiveClassPaths();
    }

    @Override
    public String[] getArchiveTimestamps() {
        return job.getArchiveTimestamps();
    }

    @Override
    public URI[] getCacheArchives() throws IOException {
        return job.getCacheArchives();
    }

    @Override
    public URI[] getCacheFiles() throws IOException {
        return job.getCacheFiles();
    }

    @Override
    public Class> getCombinerClass()
        throws ClassNotFoundException {
        return job.getCombinerClass();
    }

    @Override
    public RawComparator getCombinerKeyGroupingComparator() {
        return job.getCombinerKeyGroupingComparator();
    }

    @Override
    public Configuration getConfiguration() {
        return job.getConfiguration();
    }

    @Override
    public org.apache.hadoop.security.Credentials getCredentials() {
        return job.getCredentials();
    }

    @Override
    public Path[] getFileClassPaths() {
        return job.getFileClassPaths();
    }

    @Override
    public String[] getFileTimestamps() {
        return job.getFileTimestamps();
    }

    @Override
    public RawComparator getGroupingComparator() {
        return job.getGroupingComparator();
    }

    @Override
    public Class> getInputFormatClass()
        throws ClassNotFoundException {
        return job.getInputFormatClass();
    }

    @Override
    public String getJar() {
        return job.getJar();
    }

    @Override
    public JobID getJobID() {
        return job.getJobID();
    }

    @Override
    public String getJobName() {
        return job.getJobName();
    }

    @Override
    public boolean getJobSetupCleanupNeeded() {
        return job.getJobSetupCleanupNeeded();
    }

    @Override
    public Path[] getLocalCacheArchives() throws IOException {
        return job.getLocalCacheArchives();
    }

    @Override
    public Path[] getLocalCacheFiles() throws IOException {
        return job.getLocalCacheFiles();
    }

    @Override
    public Class getMapOutputKeyClass() {
        return job.getMapOutputKeyClass();
    }

    @Override
    public Class getMapOutputValueClass() {
        return job.getMapOutputValueClass();
    }

    @Override
    public Class> getMapperClass()
        throws ClassNotFoundException {
        return job.getMapperClass();
    }

    @Override
    public int getMaxMapAttempts() {
        return job.getMaxMapAttempts();
    }

    @Override
    public int getMaxReduceAttempts() {
        return job.getMaxReduceAttempts();
    }

    @Override
    public int getNumReduceTasks() {
        return job.getNumReduceTasks();
    }

    @Override
    public Class> getOutputFormatClass()
        throws ClassNotFoundException {
        return job.getOutputFormatClass();
    }

    @Override
    public Class getOutputKeyClass() {
        return job.getOutputKeyClass();
    }

    @Override
    public Class getOutputValueClass() {
        return job.getOutputValueClass();
    }

    @Override
    public Class> getPartitionerClass()
        throws ClassNotFoundException {
        return job.getPartitionerClass();
    }

    @Override
    public boolean getProfileEnabled() {
        return job.getProfileEnabled();
    }

    @Override
    public String getProfileParams() {
        return job.getProfileParams();
    }

    @Override
    public Configuration.IntegerRanges getProfileTaskRange(boolean isMap) {
        return job.getProfileTaskRange(isMap);
    }

    @Override
    public Class> getReducerClass()
        throws ClassNotFoundException {
        return job.getReducerClass();
    }

    @Override
    public RawComparator getSortComparator() {
        return job.getSortComparator();
    }

    @Override
    public boolean getSymlink() {
        return job.getSymlink();
    }

    @Override
    public boolean getTaskCleanupNeeded() {
        return job.getTaskCleanupNeeded();
    }

    @Override
    public String getUser() {
        return job.getUser();
    }

    @Override
    public Path getWorkingDirectory() throws IOException {
        return job.getWorkingDirectory();
    }


    @Override
    public boolean userClassesTakesPrecedence() {
        return job.userClassesTakesPrecedence();
    }
}



