
com.clickzetta.platform.test.BaseIgsWorker

package com.clickzetta.platform.test;

import com.clickzetta.platform.common.ColumnSchema;
import com.clickzetta.platform.common.Schema;
import com.clickzetta.platform.common.SchemaConvert;
import com.clickzetta.platform.operator.Bytes;
import com.clickzetta.platform.operator.WriteOperation;
import com.clickzetta.platform.util.DateUtil;
import com.google.common.annotations.VisibleForTesting;
import cz.proto.ingestion.IGSWorkerServiceGrpc;
import cz.proto.ingestion.Ingestion;
import io.grpc.stub.StreamObserver;
import org.apache.kudu.RowOperations;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;

import java.sql.Timestamp;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

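/**
 * In-memory IGS worker used in tests: it decodes the row operations carried by each
 * incoming DataMutateRequest and records the resulting (change type, values) tuples
 * per batch id, so tests can assert on what was written.
 */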
@VisibleForTesting
public class BaseIgsWorker extends IGSWorkerServiceGrpc.IGSWorkerServiceImplBase {

  private static final Logger LOG = LoggerFactory.getLogger(BaseIgsWorker.class);

  public static final Map<Long, List<Tuple2<WriteOperation.ChangeType, List<Object>>>> operatorMaps = new ConcurrentHashMap<>();

  public List<Tuple2<WriteOperation.ChangeType, List<Object>>> getOperatorsByBatchIds(Long... batchIds) {
    List<Tuple2<WriteOperation.ChangeType, List<Object>>> list = new ArrayList<>();
    for (long batchId : batchIds) {
      list.addAll(operatorMaps.get(batchId));
    }
    return list;
  }

  public List<Tuple2<WriteOperation.ChangeType, List<Object>>> getAllOperators() {
    List<Tuple2<WriteOperation.ChangeType, List<Object>>> result = new ArrayList<>();
    for (List<Tuple2<WriteOperation.ChangeType, List<Object>>> list : operatorMaps.values()) {
      result.addAll(list);
    }
    return result;
  }

  public void cleanStatus() {
    operatorMaps.clear();
  }

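  /**
   * Decodes the request's RowOperationsPB payload against the request schema and appends
   * the decoded (change type, column values) tuples under the request's batch id.
   */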
  public void decodeRequest(Ingestion.DataMutateRequest request) {
    Schema schema = SchemaConvert.getInstance(request.getTableType()).convertToExternalSchema(request.getSchema());
    RowOperations.RowOperationsPB operationsPB = request.getRowOperations();

    byte[] rowData = operationsPB.getRows().toByteArray();
    byte[] indirectData = operationsPB.getIndirectData().toByteArray();
    boolean hasNullableBitSet = schema.hasNullableColumns();

    // decode bytes with target schema format.
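    // Each encoded row starts with a 1-byte change type, followed by a columns-set
    // bitset, an optional nulls bitset (only when the schema has nullable columns),
    // and then the fixed-size value of every set, non-null column. Variable-length
    // types (STRING/VARCHAR/BINARY) store an (offset, length) pair that points into
    // indirectData.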
    List<Tuple2<WriteOperation.ChangeType, List<Object>>> operatorLists = new ArrayList<>();
    int point = 0;
    while (point < rowData.length) {
      WriteOperation.ChangeType operatorType = null;
      for (WriteOperation.ChangeType changeType : WriteOperation.ChangeType.values()) {
        if (changeType.toEncodedByte() == Bytes.getByte(rowData, point)) {
          operatorType = changeType;
          operatorLists.add(new Tuple2<>(operatorType, new ArrayList<>()));
          point++;
          break;
        }
      }
      if (operatorType == null) {
        // Guard against an unknown change-type byte, which would otherwise loop forever.
        throw new IllegalStateException("Unknown change type byte at offset " + point);
      }
      BitSet columnsBitSet = Bytes.toBitSet(rowData, point, schema.getColumns().size());
      point += Bytes.getBitSetSize(schema.getColumns().size());
      BitSet nullsBitSet = null;
      if (hasNullableBitSet) {
        nullsBitSet = Bytes.toBitSet(rowData, point, schema.getColumns().size());
        point += Bytes.getBitSetSize(schema.getColumns().size());
      }

      for (int i = 0; i < schema.getColumnCount(); i++) {
        ColumnSchema columnSchema = schema.getColumnByIndex(i);
        if (columnsBitSet.get(i)) {
          if (columnSchema.isNullable() && nullsBitSet.get(i)) {
            operatorLists.get(operatorLists.size() - 1)._2.add(null);
          } else {
            Object val = getObject(columnSchema, point, rowData, indirectData);
            operatorLists.get(operatorLists.size() - 1)._2.add(val);
            point += columnSchema.getType().getSize(columnSchema.getTypeAttributes());
          }
        }
      }
    }
    List<Tuple2<WriteOperation.ChangeType, List<Object>>> list =
        operatorMaps.computeIfAbsent(request.getBatchId(), s -> new ArrayList<>());
    list.addAll(operatorLists);
  }

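  /**
   * Bidirectional streaming endpoint: each DataMutateRequest is decoded and acknowledged
   * with a SUCCESS response carrying the same batch id.
   */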
  @Override
  public StreamObserver<Ingestion.DataMutateRequest> mutate(StreamObserver<Ingestion.DataMutateResponse> responseObserver) {
    return new StreamObserver<Ingestion.DataMutateRequest>() {
      @Override
      public void onNext(Ingestion.DataMutateRequest request) {
        Ingestion.DataMutateResponse response = Ingestion.DataMutateResponse.newBuilder()
            .setBatchId(request.getBatchId())
            .setStatus(Ingestion.ResponseStatus.newBuilder().setCode(Ingestion.Code.SUCCESS).build())
            .build();
        decodeRequest(request);
        responseObserver.onNext(response);
      }

      @Override
      public void onError(Throwable throwable) {
        responseObserver.onError(throwable);
      }

      @Override
      public void onCompleted() {
        responseObserver.onCompleted();
      }
    };
  }

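  /**
   * Reads a single column value at the given offset. Fixed-size types are read directly
   * from rowData; STRING/VARCHAR/BINARY read an 8-byte offset and 8-byte length from
   * rowData and resolve the actual bytes from indirectData.
   */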
  public Object getObject(ColumnSchema columnSchema, int point, byte[] rowData, byte[] indirectData) {
    Object val = null;
    try {
      switch (columnSchema.getType()) {
        case BOOL:
          val = Bytes.getBoolean(rowData, point);
          break;
        case INT8:
          val = Bytes.getByte(rowData, point);
          break;
        case INT16:
          val = Bytes.getShort(rowData, point);
          break;
        case INT32:
          val = Bytes.getInt(rowData, point);
          break;
        case INT64:
          val = Bytes.getLong(rowData, point);
          break;
        case UNIXTIME_MICROS:
          val = new Timestamp(Bytes.getLong(rowData, point));
          break;
        case FLOAT:
          val = Bytes.getFloat(rowData, point);
          break;
        case DOUBLE:
          val = Bytes.getDouble(rowData, point);
          break;
        case STRING:
        case VARCHAR:
        case BINARY:
          long offset = Bytes.getLong(rowData, point);
          long length = Bytes.getLong(rowData, point + 8);
          val = Bytes.getString(indirectData, (int) offset, (int) length);
          break;
        case DATE:
          int days = Bytes.getInt(rowData, point);
          val = DateUtil.epochDaysToSqlDate(days);
          break;
        case DECIMAL:
          int precision = columnSchema.getTypeAttributes().getPrecision();
          int scale = columnSchema.getTypeAttributes().getScale();
          val = Bytes.getDecimal(rowData, point, precision, scale);
          break;
        default:
          throw new IllegalArgumentException("Unsupported column type: " + columnSchema.getType());
      }
    } catch (ClassCastException e) {
      throw new IllegalArgumentException(
          "Value type does not match column type " + columnSchema.getType() +
              " for column " + columnSchema.getName());
    }
    return val;
  }
}
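
For context: because this worker is a plain gRPC service implementation, a test can host it on an in-process server and drive it with the generated client stub. The sketch below is illustrative only, assuming the standard io.grpc.inprocess utilities; the harness class name and lifecycle handling are assumptions, not part of this artifact.

import com.clickzetta.platform.test.BaseIgsWorker;
import io.grpc.ManagedChannel;
import io.grpc.Server;
import io.grpc.inprocess.InProcessChannelBuilder;
import io.grpc.inprocess.InProcessServerBuilder;

public class BaseIgsWorkerHarness {

  public static void main(String[] args) throws Exception {
    // Hypothetical harness: host the test worker on an in-process gRPC server.
    String serverName = InProcessServerBuilder.generateName();
    BaseIgsWorker worker = new BaseIgsWorker();

    Server server = InProcessServerBuilder.forName(serverName)
        .directExecutor()
        .addService(worker)
        .build()
        .start();

    ManagedChannel channel = InProcessChannelBuilder.forName(serverName)
        .directExecutor()
        .build();

    // A real test would open the stream with
    // IGSWorkerServiceGrpc.newStub(channel).mutate(responseObserver),
    // send Ingestion.DataMutateRequest messages, and then assert on
    // worker.getOperatorsByBatchIds(...) or worker.getAllOperators().

    channel.shutdownNow();
    server.shutdownNow();
    worker.cleanStatus();
  }
}

After the stream completes, the decoded rows remain available through getAllOperators() and getOperatorsByBatchIds(...); cleanStatus() clears the shared operatorMaps between tests.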