org.apache.hadoop.hive.ql.exec.vector.VectorColumnAssignFactory

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.exec.vector;

import java.sql.Timestamp;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hive.common.util.DateUtils;

/**
 * This class is used as a static factory for VectorColumnAssign.
 * It can build assigners from expression nodes or from object inspectors.
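 *
 * <p>A minimal usage sketch (hypothetical driver code, not part of this file; the
 * variable names are illustrative and it assumes input and output columns line up
 * one-to-one): each assigner copies one value per call into its output column.
 * <pre>
 *   VectorColumnAssign[] assigners = VectorColumnAssignFactory.buildAssigners(outputBatch);
 *   for (int c = 0; c &lt; assigners.length; ++c) {
 *     assigners[c].assignVectorValue(inputBatch, rowInBatch, c, outputRowIndex);
 *   }
 * </pre>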
 */
public class VectorColumnAssignFactory {

  private static abstract class VectorColumnAssignVectorBase<T extends ColumnVector>
    implements VectorColumnAssign {
    protected VectorizedRowBatch outBatch;
    protected T outCol;

    protected void copyValue(T in, int srcIndex, int destIndex) throws HiveException {
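      // Vector-to-vector assigners override this; the base version should never run.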
      throw new HiveException("Internal error: should not reach here");
    }

    @SuppressWarnings("unchecked")
    @Override
    public void assignVectorValue(VectorizedRowBatch inBatch, int batchIndex,
        int valueColumnIndex, int destIndex) throws HiveException {
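      // Resolve the source row, honoring isRepeating and selectedInUse, then
      // either copy the value or mark the destination row as null.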
      T in = (T) inBatch.cols[valueColumnIndex];
      if (in.isRepeating) {
        if (in.noNulls) {
          copyValue(in, 0, destIndex);
        }
        else {
          assignNull(destIndex);
        }
      }
      else {
        int srcIndex  = inBatch.selectedInUse ? inBatch.selected[batchIndex] : batchIndex;
        if (in.noNulls || !in.isNull[srcIndex]) {
          copyValue(in, srcIndex, destIndex);
        }
        else {
          assignNull(destIndex);
        }
      }
    }

    public VectorColumnAssign init(VectorizedRowBatch out, T cv) {
      this.outBatch = out;
      this.outCol = cv;
      return this;
    }

    protected void assignNull(int index) {
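      // Mark row 'index' as null in the output column (sets isNull[index] and clears noNulls).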
      VectorizedBatchUtil.setNullColIsNullValue(outCol, index);
    }

    @Override
    public void reset() {
    }

    @Override
    public void assignObjectValue(Object value, int destIndex) throws HiveException {
      throw new HiveException("Internal error: should not reach here");
    }
  }

  private static abstract class VectorLongColumnAssign
    extends VectorColumnAssignVectorBase<LongColumnVector> {
    protected void assignLong(long value, int destIndex) {
      outCol.vector[destIndex] = value;
    }
  }

  private static abstract class VectorDoubleColumnAssign
    extends VectorColumnAssignVectorBase<DoubleColumnVector> {

    protected void assignDouble(double value, int destIndex) {
      outCol.vector[destIndex] = value;
    }
  }

  private static abstract class VectorBytesColumnAssign
    extends VectorColumnAssignVectorBase<BytesColumnVector> {
    byte[] pad = new byte[BytesColumnVector.DEFAULT_BUFFER_SIZE];
    int padUsed = 0;
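
    // Values are appended into the shared 'pad' buffer while it has room, avoiding
    // a fresh byte[] allocation per assigned row; larger values get their own copy.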

    protected void assignBytes(byte[] buffer, int start, int length, int destIndex) {
      if (padUsed + length <= pad.length) {
        System.arraycopy(buffer, start,
            pad, padUsed, length);
        outCol.vector[destIndex] = pad;
        outCol.start[destIndex] = padUsed;
        outCol.length[destIndex] = length;
        padUsed += length;
      }
      else {
        // Copy exactly 'length' bytes starting at 'start' (copyOfRange's end index is exclusive).
        outCol.vector[destIndex] = Arrays.copyOfRange(buffer,
            start, start + length);
        outCol.start[destIndex] = 0;
        outCol.length[destIndex] = length;
      }
    }

    @Override
    public void reset() {
      super.reset();
      padUsed = 0;
    }
  }

  private static abstract class VectorDecimalColumnAssign
  extends VectorColumnAssignVectorBase<DecimalColumnVector> {

    protected void assignDecimal(HiveDecimal value, int index) {
      outCol.set(index, value);
    }
    protected void assignDecimal(HiveDecimalWritable hdw, int index) {
      outCol.set(index, hdw);
    }
  }


  public static VectorColumnAssign[] buildAssigners(VectorizedRowBatch outputBatch)
      throws HiveException {
    VectorColumnAssign[] vca = new VectorColumnAssign[outputBatch.cols.length];
    for (int i = 0; i < outputBatch.cols.length; ++i) {
      // ... per-column assigner construction truncated in this listing ...
    }
    return vca;
  }

  public static VectorColumnAssign[] buildAssigners(VectorizedRowBatch outputBatch,
      ObjectInspector outputOI, Map<String, Integer> columnMap,
      List<String> outputColumnNames) throws HiveException {
    StructObjectInspector soi = (StructObjectInspector) outputOI;
    VectorColumnAssign[] vcas = new VectorColumnAssign[outputColumnNames.size()];
    for (int i = 0; i < outputColumnNames.size(); ++i) {
      // ... per-column name lookup and assigner construction truncated in this listing ...
    }
    return vcas;
  }

  // Note: the source listing is truncated at this point.
}



