package com.flipkart.hbaseobjectmapper;
import com.flipkart.hbaseobjectmapper.codec.BestSuitCodec;
import com.flipkart.hbaseobjectmapper.codec.Codec;
import com.flipkart.hbaseobjectmapper.codec.exceptions.DeserializationException;
import com.flipkart.hbaseobjectmapper.codec.exceptions.SerializationException;
import com.flipkart.hbaseobjectmapper.exceptions.InternalError;
import com.flipkart.hbaseobjectmapper.exceptions.*;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.Serializable;
import java.lang.reflect.*;
import java.util.*;
/**
* An object mapper class that helps:
* - serialize objects of your bean-like class to HBase's {@link Put} and {@link Result} objects
* - deserialize HBase's {@link Put} and {@link Result} objects to objects of your bean-like class
*
* where your bean-like class is just like any other POJO/bean, except that it implements the {@link HBRecord} interface.
*
*
* This class is for use in:
* - MapReduce jobs which read from and/or write to HBase tables
* - Unit-tests
*
* This class is thread-safe.
*
* This class is designed in such a way that only one instance needs to be maintained for the entire lifecycle of your program.
*
* @see <a href="https://en.wikipedia.org/wiki/Plain_old_Java_object">POJO</a>
* @see <a href="https://en.wikipedia.org/wiki/JavaBeans">JavaBeans</a>
*/
public class HBObjectMapper {
private final Codec codec;
/**
* Instantiate an object of this class with a custom {@link Codec}
*
* @param codec Codec to be used for serialization and deserialization of fields
* @see #HBObjectMapper()
*/
public HBObjectMapper(Codec codec) {
if (codec == null) {
throw new IllegalArgumentException("Parameter 'codec' cannot be null. If you want to use the default codec, use the no-arg constructor");
}
this.codec = codec;
}
/**
* Instantiate an object of this class with default {@link Codec} (that is, {@link BestSuitCodec})
*
* @see #HBObjectMapper(Codec)
*/
public HBObjectMapper() {
this(new BestSuitCodec());
}
/**
* Serialize row key
*
* @param rowKey Object representing row key
* @param codecFlags Codec flags to be used while serializing the row key
* @param <R> Data type of row key
* @return Byte array
*/
<R extends Serializable & Comparable<R>> byte[] rowKeyToBytes(R rowKey, Map<String, String> codecFlags) {
return valueToByteArray(rowKey, codecFlags);
}
@SuppressWarnings("unchecked")
<R extends Serializable & Comparable<R>, T extends HBRecord<R>> R bytesToRowKey(byte[] rowKeyBytes, Map<String, String> codecFlags, Class<T> entityClass) {
try {
return (R) byteArrayToValue(rowKeyBytes, entityClass.getDeclaredMethod("composeRowKey").getReturnType(), codecFlags);
} catch (NoSuchMethodException e) {
throw new InternalError(e);
}
}
/**
* Core method that drives deserialization
*
* @see #convertRecordToMap(HBRecord)
*/
private <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T convertMapToRecord(
byte[] rowKeyBytes,
NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> map,
Class<T> clazz) {
Collection<Field> fields = getHBColumnFields0(clazz).values();
WrappedHBTable<R, T> hbTable = new WrappedHBTable<>(clazz);
R rowKey = bytesToRowKey(rowKeyBytes, hbTable.getCodecFlags(), clazz);
T record;
try {
record = clazz.getDeclaredConstructor()
.newInstance();
} catch (Exception ex) {
throw new ObjectNotInstantiatableException("Error while instantiating empty constructor of " + clazz.getName(), ex);
}
try {
record.parseRowKey(rowKey);
} catch (Exception ex) {
throw new RowKeyCouldNotBeParsedException(String.format("Supplied row key \"%s\" could not be parsed", rowKey), ex);
}
for (Field field : fields) {
WrappedHBColumn hbColumn = new WrappedHBColumn(field);
NavigableMap<byte[], NavigableMap<Long, byte[]>> familyMap = map.get(hbColumn.familyBytes());
if (familyMap == null || familyMap.isEmpty()) {
continue;
}
NavigableMap<Long, byte[]> columnVersionsMap = familyMap.get(hbColumn.columnBytes());
if (hbColumn.isSingleVersioned()) {
if (columnVersionsMap == null || columnVersionsMap.isEmpty()) {
continue;
}
Map.Entry<Long, byte[]> firstEntry = columnVersionsMap.firstEntry();
objectSetFieldValue(record, field, firstEntry.getValue(), hbColumn.codecFlags());
} else {
objectSetFieldValue(record, field, columnVersionsMap, hbColumn.codecFlags());
}
}
return record;
}
/**
* Converts a {@link Serializable} object into a byte[]
*
* @param value Object to be serialized
* @param codecFlags Flags to be passed to Codec
* @return Byte-array representing serialized object
* @see #byteArrayToValue(byte[], Type, Map)
*/
byte[] valueToByteArray(Serializable value, Map<String, String> codecFlags) {
try {
return codec.serialize(value, codecFlags);
} catch (SerializationException e) {
throw new CodecException("Couldn't serialize", e);
}
}
/**
* Serialize an object to HBase's {@link ImmutableBytesWritable}.
*
* This method is for use in Mappers, unit tests for Mappers, and unit tests for Reducers.
*
* @param value Object to be serialized
* @return Byte array, wrapped in HBase's data type
* @see #getRowKey
*/
public ImmutableBytesWritable toIbw(Serializable value) {
return new ImmutableBytesWritable(valueToByteArray(value, null));
}
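// Illustrative usage inside a Mapper's map() method (not part of the original source; the string
// row key and the 'context'/'outputValue' variables are assumptions made for this sketch):
//
//   ImmutableBytesWritable outputKey = hbObjectMapper.toIbw("key-1");
//   context.write(outputKey, outputValue);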
<R extends Serializable & Comparable<R>, T extends HBRecord<R>> WrappedHBTable<R, T> validateHBClass(Class<T> clazz) {
Constructor<?> constructor;
try {
constructor = clazz.getDeclaredConstructor();
} catch (NoSuchMethodException e) {
throw new NoEmptyConstructorException(clazz, e);
}
if (!Modifier.isPublic(constructor.getModifiers())) {
throw new EmptyConstructorInaccessibleException(String.format("Empty constructor of class %s is inaccessible. It needs to be public.", clazz.getName()));
}
int numOfHBColumns = 0;
WrappedHBTable<R, T> hbTable = new WrappedHBTable<>(clazz);
Set<FamilyAndColumn> columns = new HashSet<>(clazz.getDeclaredFields().length, 1.0f);
Map<String, Field> hbColumnFields = getHBColumnFields0(clazz);
for (Field field : hbColumnFields.values()) {
WrappedHBColumn hbColumn = new WrappedHBColumn(field);
if (hbColumn.isPresent()) {
if (!hbTable.isColumnFamilyPresent(hbColumn.family())) {
throw new IllegalArgumentException(String.format("Class %s has field '%s' mapped to HBase column '%s' - but column family '%s' isn't configured in @%s annotation",
clazz.getName(), field.getName(), hbColumn, hbColumn.family(), HBTable.class.getSimpleName()));
}
if (hbColumn.isSingleVersioned()) {
validateHBColumnSingleVersionField(field);
} else if (hbColumn.isMultiVersioned()) {
validateHBColumnMultiVersionField(field);
}
if (!columns.add(new FamilyAndColumn(hbColumn.family(), hbColumn.column()))) {
throw new FieldsMappedToSameColumnException(String.format("Class %s has more than one field (e.g. '%s') mapped to same HBase column %s", clazz.getName(), field.getName(), hbColumn));
}
numOfHBColumns++;
}
}
if (numOfHBColumns == 0) {
throw new MissingHBColumnFieldsException(clazz);
}
return hbTable;
}
/**
* Internal note: This should be in sync with {@link #getFieldType(Field, boolean)}
*/
private void validateHBColumnMultiVersionField(Field field) {
validateHBColumnField(field);
if (!(field.getGenericType() instanceof ParameterizedType)) {
throw new IncompatibleFieldForHBColumnMultiVersionAnnotationException(String.format("Field %s is not even a parameterized type", field));
}
if (field.getType() != NavigableMap.class) {
throw new IncompatibleFieldForHBColumnMultiVersionAnnotationException(String.format("Field %s is not a NavigableMap", field));
}
ParameterizedType pType = (ParameterizedType) field.getGenericType();
Type[] typeArguments = pType.getActualTypeArguments();
if (typeArguments.length != 2 || typeArguments[0] != Long.class) {
throw new IncompatibleFieldForHBColumnMultiVersionAnnotationException(String.format("Field %s has unexpected type params (Key should be of %s type)", field, Long.class.getName()));
}
if (!codec.canDeserialize(getFieldType(field, true))) {
throw new UnsupportedFieldTypeException(String.format("Field %s in class %s is of unsupported type Navigable ", field.getName(), field.getDeclaringClass().getName(), field.getDeclaringClass().getName()));
}
}
/**
* Internal note: For multi-version usecase, this should be in sync with {@link #validateHBColumnMultiVersionField(Field)}
*/
Type getFieldType(Field field, boolean isMultiVersioned) {
if (isMultiVersioned) {
return ((ParameterizedType) field.getGenericType()).getActualTypeArguments()[1];
} else {
return field.getGenericType();
}
}
private void validateHBColumnSingleVersionField(Field field) {
validateHBColumnField(field);
Type fieldType = getFieldType(field, false);
if (fieldType instanceof Class) {
Class<?> fieldClazz = (Class<?>) fieldType;
if (fieldClazz.isPrimitive()) {
throw new MappedColumnCantBePrimitiveException(String.format("Field %s in class %s is a primitive of type %s (Primitive data types are not supported as they're not nullable)", field.getName(), field.getDeclaringClass().getName(), fieldClazz.getName()));
}
}
if (!codec.canDeserialize(fieldType)) {
throw new UnsupportedFieldTypeException(String.format("Field %s in class %s is of unsupported type (%s)", field.getName(), field.getDeclaringClass().getName(), fieldType));
}
}
private void validateHBColumnField(Field field) {
WrappedHBColumn hbColumn = new WrappedHBColumn(field);
int modifiers = field.getModifiers();
if (Modifier.isTransient(modifiers)) {
throw new MappedColumnCantBeTransientException(field, hbColumn.getName());
}
if (Modifier.isStatic(modifiers)) {
throw new MappedColumnCantBeStaticException(field, hbColumn.getName());
}
}
/**
* Core method that drives serialization
*
* @see #convertMapToRecord(byte[], NavigableMap, Class)
*/
@SuppressWarnings("unchecked")
private <R extends Serializable & Comparable<R>, T extends HBRecord<R>>
NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> convertRecordToMap(T record) {
Class<T> clazz = (Class<T>) record.getClass();
Collection<Field> fields = getHBColumnFields0(clazz).values();
NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> map = new TreeMap<>(Bytes.BYTES_COMPARATOR);
int numOfFieldsToWrite = 0;
for (Field field : fields) {
WrappedHBColumn hbColumn = new WrappedHBColumn(field);
if (hbColumn.isSingleVersioned()) {
byte[] familyName = hbColumn.familyBytes(), columnName = hbColumn.columnBytes();
if (!map.containsKey(familyName)) {
map.put(familyName, new TreeMap<>(Bytes.BYTES_COMPARATOR));
}
Map<byte[], NavigableMap<Long, byte[]>> columns = map.get(familyName);
final byte[] fieldValueBytes = getFieldValueAsBytes(record, field, hbColumn.codecFlags());
if (fieldValueBytes == null || fieldValueBytes.length == 0) {
continue;
}
NavigableMap<Long, byte[]> singleValue = new TreeMap<>();
singleValue.put(HConstants.LATEST_TIMESTAMP, fieldValueBytes);
columns.put(columnName, singleValue);
numOfFieldsToWrite++;
} else if (hbColumn.isMultiVersioned()) {
NavigableMap<Long, byte[]> fieldValueVersions = getFieldValuesAsNavigableMapOfBytes(record, field, hbColumn.codecFlags());
if (fieldValueVersions == null)
continue;
byte[] familyName = hbColumn.familyBytes(), columnName = hbColumn.columnBytes();
if (!map.containsKey(familyName)) {
map.put(familyName, new TreeMap<>(Bytes.BYTES_COMPARATOR));
}
Map<byte[], NavigableMap<Long, byte[]>> columns = map.get(familyName);
columns.put(columnName, fieldValueVersions);
numOfFieldsToWrite++;
}
}
if (numOfFieldsToWrite == 0) {
throw new AllHBColumnFieldsNullException();
}
return map;
}
private <R extends Serializable & Comparable<R>, T extends HBRecord<R>> byte[] getFieldValueAsBytes(T record, Field field, Map<String, String> codecFlags) {
Serializable fieldValue;
try {
field.setAccessible(true);
fieldValue = (Serializable) field.get(record);
} catch (IllegalAccessException e) {
throw new BadHBaseLibStateException(e);
}
return valueToByteArray(fieldValue, codecFlags);
}
private <R extends Serializable & Comparable<R>, T extends HBRecord<R>> NavigableMap<Long, byte[]> getFieldValuesAsNavigableMapOfBytes(T record, Field field, Map<String, String> codecFlags) {
try {
field.setAccessible(true);
@SuppressWarnings("unchecked")
NavigableMap<Long, R> fieldValueVersions = (NavigableMap<Long, R>) field.get(record);
if (fieldValueVersions == null)
return null;
if (fieldValueVersions.size() == 0) {
throw new FieldAnnotatedWithHBColumnMultiVersionCantBeEmpty();
}
NavigableMap<Long, byte[]> output = new TreeMap<>();
for (Map.Entry<Long, R> e : fieldValueVersions.entrySet()) {
Long timestamp = e.getKey();
R fieldValue = e.getValue();
if (fieldValue == null)
continue;
byte[] fieldValueBytes = valueToByteArray(fieldValue, codecFlags);
output.put(timestamp, fieldValueBytes);
}
return output;
} catch (IllegalAccessException e) {
throw new BadHBaseLibStateException(e);
}
}
/**
* Converts an object of your bean-like class to HBase's {@link Put} object.
*
* This method is for use in a MapReduce job whose Reducer class extends HBase's
* org.apache.hadoop.hbase.mapreduce.TableReducer class (in other words, a MapReduce job whose output is an HBase table).
*
* @param record An object of your bean-like class (one that implements {@link HBRecord} interface)
* @param <R> Data type of row key
* @param <T> Entity type
* @return HBase's {@link Put} object
*/
@SuppressWarnings("unchecked")
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> Put writeValueAsPut(T record) {
validateHBClass((Class<T>) record.getClass());
return writeValueAsPut0(record);
}
<R extends Serializable & Comparable<R>, T extends HBRecord<R>> Put writeValueAsPut0(T record) {
Put put = new Put(composeRowKey(record));
for (Map.Entry<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> fe : convertRecordToMap(record).entrySet()) {
byte[] family = fe.getKey();
for (Map.Entry<byte[], NavigableMap<Long, byte[]>> e : fe.getValue().entrySet()) {
byte[] columnName = e.getKey();
NavigableMap<Long, byte[]> columnValuesVersioned = e.getValue();
if (columnValuesVersioned == null)
continue;
for (Map.Entry<Long, byte[]> versionAndValue : columnValuesVersioned.entrySet()) {
put.addColumn(family, columnName, versionAndValue.getKey(), versionAndValue.getValue());
}
}
}
return put;
}
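// Illustrative usage of writeValueAsPut (not part of the original source). "Citizen" is a
// hypothetical bean-like class assumed to implement HBRecord<String> and to carry @HBTable/@HBColumn
// mappings; 'table' is assumed to be an org.apache.hadoop.hbase.client.Table handle.
//
//   HBObjectMapper hbObjectMapper = new HBObjectMapper();
//   Citizen citizen = new Citizen("IND#1", "John", 30);
//   Put put = hbObjectMapper.writeValueAsPut(citizen);
//   table.put(put);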
/**
* A bulk version of {@link #writeValueAsPut(HBRecord)} method
*
* @param records List of objects of your bean-like class (of type that extends {@link HBRecord})
* @param <R> Data type of row key
* @param <T> Entity type
* @return List of HBase's {@link Put} objects
*/
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> List<Put> writeValueAsPut(List<T> records) {
List<Put> puts = new ArrayList<>(records.size());
for (T record : records) {
Put put = writeValueAsPut(record);
puts.add(put);
}
return puts;
}
/**
* Converts an object of your bean-like class to HBase's {@link Result} object.
*
* This method is for use in unit tests of a MapReduce job whose Mapper class extends
* org.apache.hadoop.hbase.mapreduce.TableMapper class (in other words, a MapReduce job whose input is an HBase table).
*
* @param record object of your bean-like class (of type that extends {@link HBRecord})
* @param <R> Data type of row key
* @param <T> Entity type
* @return HBase's {@link Result} object
*/
@SuppressWarnings("unchecked")
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> Result writeValueAsResult(T record) {
validateHBClass((Class<T>) record.getClass());
byte[] row = composeRowKey(record);
List<Cell> cellList = new ArrayList<>();
for (Map.Entry<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> fe : convertRecordToMap(record).entrySet()) {
byte[] family = fe.getKey();
for (Map.Entry<byte[], NavigableMap<Long, byte[]>> e : fe.getValue().entrySet()) {
byte[] columnName = e.getKey();
NavigableMap<Long, byte[]> valuesVersioned = e.getValue();
if (valuesVersioned == null)
continue;
for (Map.Entry<Long, byte[]> columnVersion : valuesVersioned.entrySet()) {
CellBuilder cellBuilder = CellBuilderFactory.create(CellBuilderType.DEEP_COPY);
cellBuilder.setType(Cell.Type.Put).setRow(row).setFamily(family).setQualifier(columnName).setTimestamp(columnVersion.getKey()).setValue(columnVersion.getValue());
Cell cell = cellBuilder.build();
cellList.add(cell);
}
}
}
return Result.create(cellList);
}
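// Illustrative usage of writeValueAsResult in a Mapper unit test (not part of the original source).
// "Citizen" is a hypothetical entity class and 'mapDriver' follows an MRUnit-style MapDriver
// testing pattern, assumed here only for the sketch.
//
//   Citizen citizen = new Citizen("IND#1", "John", 30);
//   Result fakeInput = hbObjectMapper.writeValueAsResult(citizen);
//   ImmutableBytesWritable rowKey = hbObjectMapper.getRowKey(citizen);
//   mapDriver.withInput(rowKey, fakeInput).runTest();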
/**
* A bulk version of {@link #writeValueAsResult(HBRecord)} method
*
* @param records List of objects of your bean-like class (of type that extends {@link HBRecord})
* @param <R> Data type of row key
* @param <T> Entity type
* @return List of HBase's {@link Result} objects
*/
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> List<Result> writeValueAsResult(List<T> records) {
List<Result> results = new ArrayList<>(records.size());
for (T record : records) {
Result result = writeValueAsResult(record);
results.add(result);
}
return results;
}
/**
* Converts HBase's {@link Result} object to an object of your bean-like class.
* This method is for use in a MapReduce job whose Mapper class extends org.apache.hadoop.hbase.mapreduce.TableMapper class (in other words, a MapReduce job whose input is an HBase table)
*
* @param rowKey Row key of the record that corresponds to {@link Result}. If this is null, an attempt will be made to resolve it from {@link Result}
* @param result HBase's {@link Result} object
* @param clazz {@link Class} to which you want to convert (must implement {@link HBRecord} interface)
* @param <R> Data type of row key
* @param <T> Entity type
* @return Object of bean-like class
* @throws CodecException One or more column values is a byte[] that couldn't be deserialized into field type (as defined in your entity class)
*/
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValue(ImmutableBytesWritable rowKey, Result result, Class<T> clazz) {
validateHBClass(clazz);
if (rowKey == null)
return readValueFromResult(result, clazz);
else
return readValueFromRowAndResult(rowKey.get(), result, clazz);
}
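// Illustrative usage inside a TableMapper's map() method (not part of the original source;
// "Citizen" is a hypothetical entity class assumed to implement HBRecord<String>):
//
//   @Override
//   protected void map(ImmutableBytesWritable key, Result value, Context context) {
//       Citizen citizen = hbObjectMapper.readValue(key, value, Citizen.class);
//       // ... work with the deserialized 'citizen' object ...
//   }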
/**
* A compact version of {@link #readValue(ImmutableBytesWritable, Result, Class)} method
*
* @param result HBase's {@link Result} object
* @param clazz {@link Class} to which you want to convert (must implement {@link HBRecord} interface)
* @param <R> Data type of row key
* @param <T> Entity type
* @return Object of bean-like class
* @throws CodecException One or more column values is a byte[] that couldn't be deserialized into field type (as defined in your entity class)
*/
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValue(Result result, Class<T> clazz) {
validateHBClass(clazz);
return readValueFromResult(result, clazz);
}
private boolean isResultEmpty(Result result) {
if (result == null || result.isEmpty()) return true;
byte[] rowBytes = result.getRow();
return rowBytes == null || rowBytes.length == 0;
}
<R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValueFromResult(Result result, Class<T> clazz) {
if (isResultEmpty(result)) return null;
return convertMapToRecord(result.getRow(), result.getMap(), clazz);
}
private <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValueFromRowAndResult(byte[] rowKeyBytes, Result result, Class<T> clazz) {
if (isResultEmpty(result)) {
return null;
}
return convertMapToRecord(rowKeyBytes, result.getMap(), clazz);
}
private void objectSetFieldValue(Object obj, Field field, NavigableMap<Long, byte[]> columnValuesVersioned, Map<String, String> codecFlags) {
if (columnValuesVersioned == null)
return;
try {
field.setAccessible(true);
NavigableMap<Long, Object> columnValuesVersionedBoxed = new TreeMap<>();
for (Map.Entry<Long, byte[]> versionAndValue : columnValuesVersioned.entrySet()) {
columnValuesVersionedBoxed.put(versionAndValue.getKey(), byteArrayToValue(versionAndValue.getValue(), ((ParameterizedType) field.getGenericType()).getActualTypeArguments()[1], codecFlags));
}
field.set(obj, columnValuesVersionedBoxed);
} catch (Exception ex) {
throw new ConversionFailedException(String.format("Could not set value on field \"%s\" on instance of class %s", field.getName(), obj.getClass()), ex);
}
}
private void objectSetFieldValue(Object obj, Field field, byte[] value, Map<String, String> codecFlags) {
if (value == null || value.length == 0)
return;
try {
field.setAccessible(true);
field.set(obj, byteArrayToValue(value, field.getGenericType(), codecFlags));
} catch (IllegalAccessException e) {
throw new ConversionFailedException(String.format("Could not set value on field \"%s\" on instance of class %s", field.getName(), obj.getClass()), e);
}
}
/**
* Converts a byte array representing HBase column data to appropriate data type (boxed as object)
*
* @see #valueToByteArray(Serializable, Map)
*/
Object byteArrayToValue(byte[] value, Type type, Map<String, String> codecFlags) {
try {
if (value == null || value.length == 0)
return null;
else
return codec.deserialize(value, type, codecFlags);
} catch (DeserializationException e) {
throw new CodecException("Error while deserializing", e);
}
}
/**
* Converts HBase's {@link Put} object to an object of your bean-like class
* This method is for use in unit-tests of a MapReduce job whose Reducer class extends org.apache.hadoop.hbase.mapreduce.TableReducer class (in other words, a MapReduce job whose output is an HBase table)
*
* @param rowKey Row key of the record that corresponds to {@link Put}. If this is null, an attempt will be made to resolve it from the {@link Put} object
* @param put HBase's {@link Put} object
* @param clazz {@link Class} to which you want to convert (must implement {@link HBRecord} interface)
* @param <R> Data type of row key
* @param <T> Entity type
* @return Object of bean-like class
* @throws CodecException One or more column values is a byte[] that couldn't be deserialized into field type (as defined in your entity class)
*/
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValue(ImmutableBytesWritable rowKey, Put put, Class<T> clazz) {
validateHBClass(clazz);
if (rowKey == null)
return readValueFromPut(put, clazz);
else
return readValueFromRowAndPut(rowKey.get(), put, clazz);
}
/**
* A variant of {@link #readValue(ImmutableBytesWritable, Put, Class)} method
*
* @param rowKey Row key of the record that corresponds to {@link Put}. If this is null, an attempt will be made to resolve it from the {@link Put} object
* @param put HBase's {@link Put} object
* @param clazz {@link Class} to which you want to convert (must implement {@link HBRecord} interface)
* @param <R> Data type of row key
* @param <T> Entity type
* @return Object of bean-like class
* @throws CodecException One or more column values is a byte[] that couldn't be deserialized into field type (as defined in your entity class)
*/
<R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValue(R rowKey, Put put, Class<T> clazz) {
if (rowKey == null) {
return readValueFromPut(put, clazz);
} else {
return readValueFromRowAndPut(rowKeyToBytes(rowKey, WrappedHBTable.getCodecFlags(clazz)), put, clazz);
}
}
private <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValueFromRowAndPut(byte[] rowKeyBytes, Put put, Class<T> clazz) {
Map<byte[], List<Cell>> rawMap = put.getFamilyCellMap();
NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> map = new TreeMap<>(Bytes.BYTES_COMPARATOR);
for (Map.Entry<byte[], List<Cell>> familyNameAndColumnValues : rawMap.entrySet()) {
byte[] family = familyNameAndColumnValues.getKey();
if (!map.containsKey(family)) {
map.put(family, new TreeMap<>(Bytes.BYTES_COMPARATOR));
}
List<Cell> cellList = familyNameAndColumnValues.getValue();
for (Cell cell : cellList) {
byte[] column = CellUtil.cloneQualifier(cell);
if (!map.get(family).containsKey(column)) {
map.get(family).put(column, new TreeMap<>());
}
map.get(family).get(column).put(cell.getTimestamp(), CellUtil.cloneValue(cell));
}
}
return convertMapToRecord(rowKeyBytes, map, clazz);
}
private <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValueFromPut(Put put, Class<T> clazz) {
if (put == null || put.isEmpty() || put.getRow() == null || put.getRow().length == 0) {
return null;
}
return readValueFromRowAndPut(put.getRow(), put, clazz);
}
/**
* A compact version of {@link #readValue(ImmutableBytesWritable, Put, Class)} method
*
* @param put HBase's {@link Put} object
* @param clazz {@link Class} to which you want to convert (must implement {@link HBRecord} interface)
* @param <R> Data type of row key
* @param <T> Entity type
* @return Object of bean-like class
* @throws CodecException One or more column values is a byte[] that couldn't be deserialized into field type (as defined in your entity class)
*/
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValue(Put put, Class<T> clazz) {
validateHBClass(clazz);
return readValueFromPut(put, clazz);
}
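// Illustrative round-trip sketch (not part of the original source; "Citizen" is a hypothetical
// entity class): a record serialized to a Put can be read back for assertions in unit tests of
// code whose output is an HBase table.
//
//   Put put = hbObjectMapper.writeValueAsPut(citizen);
//   Citizen reconstructed = hbObjectMapper.readValue(put, Citizen.class);
//   assertEquals(citizen, reconstructed);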
/**
* Get row key (for use in HBase) from a bean-like object.
* For use in:
* - reducer jobs that extend HBase's org.apache.hadoop.hbase.mapreduce.TableReducer class
* - unit tests for mapper jobs that extend HBase's org.apache.hadoop.hbase.mapreduce.TableMapper class
*
* @param record object of your bean-like class (of type that extends {@link HBRecord})
* @param <R> Data type of row key
* @param <T> Entity type
* @return Serialised row key wrapped in {@link ImmutableBytesWritable}
* @see #toIbw(Serializable)
*/
@SuppressWarnings("unchecked")
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> ImmutableBytesWritable getRowKey(T record) {
if (record == null) {
throw new NullPointerException("Cannot compose row key for null objects");
}
validateHBClass((Class<T>) record.getClass());
return new ImmutableBytesWritable(composeRowKey(record));
}
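// Illustrative usage of getRowKey in a Reducer unit test (not part of the original source;
// "Citizen" is a hypothetical entity class and 'reduceDriver' follows an MRUnit-style
// ReduceDriver pattern, assumed here only for the sketch):
//
//   Citizen expected = new Citizen("IND#1", "John", 30);
//   reduceDriver.withOutput(hbObjectMapper.getRowKey(expected), hbObjectMapper.writeValueAsPut(expected));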
private <R extends Serializable & Comparable<R>, T extends HBRecord<R>> byte[] composeRowKey(T record) {
R rowKey;
try {
rowKey = record.composeRowKey();
} catch (Exception ex) {
throw new RowKeyCantBeComposedException(ex);
}
if (rowKey == null || rowKey.toString().isEmpty()) {
throw new RowKeyCantBeEmptyException();
}
@SuppressWarnings("unchecked")
WrappedHBTable<R, T> hbTable = new WrappedHBTable<>((Class<T>) record.getClass());
return valueToByteArray(rowKey, hbTable.getCodecFlags());
}
/**
* Get list of column families and their max versions, mapped in definition of your bean-like class
*
* @param clazz {@link Class} that you're reading (must implement {@link HBRecord} interface)
* @param <R> Data type of row key
* @param <T> Entity type
* @return Map of column families and their max versions
*/
<R extends Serializable & Comparable<R>, T extends HBRecord<R>> Map<String, Integer> getColumnFamiliesAndVersions(Class<T> clazz) {
final WrappedHBTable<R, T> hbTable = validateHBClass(clazz);
return hbTable.getFamiliesAndVersions();
}
/**
* Checks whether input class can be converted to HBase data types and vice-versa
*
* @param clazz {@link Class} you intend to validate (must implement {@link HBRecord} interface)
* @param <R> Data type of row key
* @param <T> Entity type
* @return true if the class can be mapped to HBase, false otherwise
*/
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> boolean isValid(Class<T> clazz) {
try {
validateHBClass(clazz);
return true;
} catch (Exception ex) {
return false;
}
}
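// Illustrative usage (not part of the original source; "Citizen" is a hypothetical entity class):
// validating the mapping once at startup gives an early failure instead of errors at read/write time.
//
//   if (!hbObjectMapper.isValid(Citizen.class)) {
//       throw new IllegalStateException("Citizen is not mappable to HBase");
//   }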
/**
* For your bean-like {@link Class}, get all fields mapped to HBase columns
*
* @param clazz Bean-like {@link Class} (must implement {@link HBRecord} interface) whose fields you intend to read
* @param <R> Data type of row key
* @param <T> Entity type
* @return A {@link Map} with keys as field names and values as instances of {@link Field}
*/
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> Map<String, Field> getHBColumnFields(Class<T> clazz) {
validateHBClass(clazz);
return getHBColumnFields0(clazz);
}
<R extends Serializable & Comparable<R>, T extends HBRecord<R>> Map<String, Field> getHBColumnFields0(Class<T> clazz) {
Map<String, Field> mappings = new LinkedHashMap<>();
Class<?> thisClass = clazz;
while (thisClass != null && thisClass != Object.class) {
for (Field field : thisClass.getDeclaredFields()) {
if (new WrappedHBColumn(field).isPresent()) {
mappings.put(field.getName(), field);
}
}
Class<?> parentClass = thisClass.getSuperclass();
thisClass = parentClass.isAnnotationPresent(MappedSuperClass.class) ? parentClass : null;
}
return mappings;
}
}