com.aliyun.datahub.client.http.converter.batch.BatchUtil
package com.aliyun.datahub.client.http.converter.batch;

import com.aliyun.datahub.client.http.HttpConfig;
import com.aliyun.datahub.client.model.*;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class BatchUtil {
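    // Per-thread scratch buffers reused by the read/parse helpers below. The parse*
    // methods return the buffer's backing array directly, so a caller must copy or
    // consume the result before the next parse call on the same thread.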
    private static final ThreadLocal<ByteBuffer> FOUR_BYTE_BUFFER = ThreadLocal.withInitial(() -> ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN));
    private static final ThreadLocal<ByteBuffer> EIGHT_BYTE_BUFFER = ThreadLocal.withInitial(() -> ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN));
    private static final ThreadLocal<byte[]> PADDING_BYTES = ThreadLocal.withInitial(() -> new byte[]{0, 0, 0, 0});

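    // Reads a 4-byte little-endian int from buffer starting at pos.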
    public static int readInt(byte[] buffer, int pos) {
        ByteBuffer byteBuffer = FOUR_BYTE_BUFFER.get();
        byteBuffer.clear();
        byteBuffer.put(buffer, pos, 4);
        byteBuffer.flip();
        return byteBuffer.getInt();
    }

    public static byte[] parseInt(int value) {
        ByteBuffer byteBuffer = FOUR_BYTE_BUFFER.get();
        byteBuffer.clear();
        byteBuffer.putInt(value);
        return byteBuffer.array();
    }

    public static byte[] parseLong(long value) {
        ByteBuffer byteBuffer = EIGHT_BYTE_BUFFER.get();
        byteBuffer.clear();
        byteBuffer.putLong(value);
        return byteBuffer.array();
    }

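    // Encodes a float into the 8-byte buffer: the 4-byte IEEE 754 value followed by
    // 4 zero padding bytes.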
    public static byte[] parseFloat(float value) {
        ByteBuffer byteBuffer = EIGHT_BYTE_BUFFER.get();
        byteBuffer.clear();
        byteBuffer.putFloat(value);
        byteBuffer.put(PADDING_BYTES.get());
        return byteBuffer.array();
    }

    public static byte[] parseDouble(double value) {
        ByteBuffer byteBuffer = EIGHT_BYTE_BUFFER.get();
        byteBuffer.clear();
        byteBuffer.putDouble(value);
        return byteBuffer.array();
    }

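    // Encodes a short string (at most 7 bytes) inline into 8 bytes: the string bytes
    // are zero-padded to 7 bytes and the final byte stores the length OR'ed with 0x80.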
    public static byte[] parseLittleStr(byte[] bytes) {
        ByteBuffer byteBuffer = EIGHT_BYTE_BUFFER.get();
        int length = bytes.length;
        byteBuffer.clear();
        byteBuffer.put(bytes);
        for (int i = length; i < 7; ++i) {
            byteBuffer.put((byte) 0);
        }
        byteBuffer.put((byte) (bytes.length | 0x80));
        return byteBuffer.array();
    }

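    // Converts each RecordEntry to a BinaryRecord and serializes the batch with the
    // given compress type.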
    public static byte[] serialize(List<RecordEntry> recordList, HttpConfig.CompressType compressType) {
        BatchBinaryRecord batch = new BatchBinaryRecord();
        for (RecordEntry entry : recordList) {
            batch.addRecord(convertToBinaryRecord(entry));
        }
        return batch.serialize(compressType);
    }

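    // Parses batch binary data back into RecordEntry objects, attaching cursor,
    // sequence, serial, and system time from the response meta. A null schema is
    // treated as a BLOB record (single byte[] field).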
    public static List<RecordEntry> deserialize(byte[] data, RecordRespMeta meta, RecordSchema schema) {
        BatchBinaryRecord batch = BatchBinaryRecord.parseFrom(data, schema);

        List<RecordEntry> records = new ArrayList<>();
        for (BinaryRecord binaryRecord : batch.getRecords()) {
            records.add(convertFromBinaryRecord(binaryRecord, meta, schema));
        }
        return records;
    }

    private static RecordEntry convertFromBinaryRecord(BinaryRecord binaryRecord, RecordRespMeta meta, RecordSchema schema) {
        RecordEntry record = new RecordEntry();
        record.setCursor(meta.getCursor());
        record.setSequence(meta.getSequence());
        record.setSerial(meta.getSerial());
        record.setSystemTime(meta.getSystemTime());

        record.setAttributes(binaryRecord.getAttrMap());
        if (schema != null) {
            TupleRecordData data = new TupleRecordData(schema);
            for (int i = 0; i < schema.getFields().size(); ++i) {
                data.setField(i, binaryRecord.getField(i));
            }
            record.setRecordData(data);
        } else {
            byte[] bytes = (byte[]) binaryRecord.getField(0);
            BlobRecordData data = new BlobRecordData(bytes);
            record.setRecordData(data);
        }
        return record;
    }

    private static BinaryRecord convertToBinaryRecord(RecordEntry entry) {
        RecordData recordData = entry.getRecordData();
        BinaryRecord record;
        if (recordData instanceof TupleRecordData) {
            record = convertTupleToBinaryRecord((TupleRecordData) recordData);
        } else {
            record = convertBlobToBinaryRecord((BlobRecordData) recordData);
        }

        if (entry.getAttributes() != null) {
            for (Map.Entry<String, String> item : entry.getAttributes().entrySet()) {
                record.addAttribute(item.getKey(), item.getValue());
            }
        }

        return record;
    }

    private static BinaryRecord convertTupleToBinaryRecord(TupleRecordData data) {
        RecordSchema schema = data.getRecordSchema();
        BinaryRecord record = new BinaryRecord(schema);
        for (int i = 0; i < schema.getFields().size(); ++i) {
            Object value = data.getField(i);
            if (value != null) {
                record.setField(i, value);
            }
        }
        return record;
    }

    private static BinaryRecord convertBlobToBinaryRecord(BlobRecordData data) {
        BinaryRecord record = new BinaryRecord(null);
        record.setField(0, data.getData());
        return record;
    }

}
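
For orientation, below is a minimal round-trip sketch of how serialize and deserialize might be driven from user code. It is a separate illustrative file, not part of the library source above: the field name "val", the LZ4 compress type, and the externally supplied RecordRespMeta (normally taken from a read response) are assumptions.

// BatchUtilExample.java -- hypothetical usage sketch, not part of the SDK.
import com.aliyun.datahub.client.http.HttpConfig;
import com.aliyun.datahub.client.http.converter.batch.BatchUtil;
import com.aliyun.datahub.client.model.*;

import java.util.Collections;
import java.util.List;

public class BatchUtilExample {
    // Field name, attribute, and compress type are illustrative assumptions;
    // respMeta would normally come from a read/get-records response.
    public static List<RecordEntry> roundTrip(RecordRespMeta respMeta) {
        // Tuple schema with a single STRING field.
        RecordSchema schema = new RecordSchema();
        schema.addField(new Field("val", FieldType.STRING));

        TupleRecordData data = new TupleRecordData(schema);
        data.setField("val", "hello");

        RecordEntry entry = new RecordEntry();
        entry.setRecordData(data);
        entry.addAttribute("source", "demo");

        // Encode to the batch binary wire format, then parse it back with the same schema.
        byte[] payload = BatchUtil.serialize(Collections.singletonList(entry), HttpConfig.CompressType.LZ4);
        return BatchUtil.deserialize(payload, respMeta, schema);
    }
}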