package com.flipkart.hbaseobjectmapper;
import com.flipkart.hbaseobjectmapper.codec.BestSuitCodec;
import com.flipkart.hbaseobjectmapper.codec.Codec;
import com.flipkart.hbaseobjectmapper.codec.DeserializationException;
import com.flipkart.hbaseobjectmapper.codec.SerializationException;
import com.flipkart.hbaseobjectmapper.exceptions.*;
import com.flipkart.hbaseobjectmapper.exceptions.InternalError;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import java.io.Serializable;
import java.lang.reflect.*;
import java.util.*;
/**
* An object mapper class that helps convert objects of your bean-like class to HBase's {@link Put} and {@link Result} objects (and vice-versa).
* This class is for use in MapReduce jobs that read from and/or write to HBase tables, and in their unit-tests.
* This class is thread-safe.
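* <p>
* A minimal usage sketch (the {@code Employee} class below is a hypothetical example, not part of this library; it assumes
* an {@code HBColumn} annotation with {@code family} and {@code column} attributes):
* <pre>{@code
* public class Employee implements HBRecord<String> {
*     private @HBRowKey String empId;
*     private @HBColumn(family = "main", column = "name") String name;
*
*     public String composeRowKey() { return empId; }
*     public void parseRowKey(String rowKey) { this.empId = rowKey; }
* }
*
* HBObjectMapper hbObjectMapper = new HBObjectMapper();
* Put put = hbObjectMapper.writeValueAsPut(employee);                 // bean -> Put (for writes)
* Employee parsed = hbObjectMapper.readValue(result, Employee.class); // Result -> bean (for reads)
* }</pre>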
*/
public class HBObjectMapper {
private static final Codec DEFAULT_CODEC = new BestSuitCodec();
private final Codec codec;
/**
* Instantiate object of this class with a custom {@link Codec}
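* <p>
* For example (a sketch; {@code MyJsonCodec} is a hypothetical class implementing the {@link Codec} interface):
* <pre>{@code
* HBObjectMapper hbObjectMapper = new HBObjectMapper(new MyJsonCodec());
* }</pre>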
*
* @param codec Codec to be used for serialization and deserialization of fields
*/
public HBObjectMapper(Codec codec) {
this.codec = codec;
}
/**
* Instantiate an object of this class with default {@link Codec} of {@link BestSuitCodec}
*/
public HBObjectMapper() {
this(DEFAULT_CODEC);
}
/**
* Serialize row key
*
* @param rowKey Object representing row key
* @param <R> Data type of row key
* @return Byte array
*/
<R extends Serializable & Comparable<R>> byte[] rowKeyToBytes(R rowKey) {
return valueToByteArray(rowKey, null);
}
@SuppressWarnings("unchecked")
private <R extends Serializable & Comparable<R>, T extends HBRecord<R>> R bytesToRowKey(byte[] rowKeyBytes, Class<T> entityClass) throws DeserializationException {
try {
return (R) byteArrayToValue(rowKeyBytes, entityClass.getDeclaredMethod("composeRowKey").getReturnType(), null);
} catch (NoSuchMethodException e) {
throw new InternalError(e);
}
}
private <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T mapToObj(byte[] rowKeyBytes, NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> map, Class<T> clazz) throws DeserializationException {
R rowKey = bytesToRowKey(rowKeyBytes, clazz);
T obj;
validateHBClass(clazz);
try {
obj = clazz.newInstance();
} catch (Exception ex) {
throw new ObjectNotInstantiatableException("Error while instantiating empty constructor of " + clazz.getName(), ex);
}
try {
obj.parseRowKey(rowKey);
} catch (Exception ex) {
throw new RowKeyCouldNotBeParsedException(String.format("Supplied row key \"%s\" could not be parsed", rowKey), ex);
}
for (Field field : clazz.getDeclaredFields()) {
WrappedHBColumn hbColumn = new WrappedHBColumn(field);
if (hbColumn.isSingleVersioned()) {
NavigableMap<byte[], NavigableMap<Long, byte[]>> familyMap = map.get(Bytes.toBytes(hbColumn.family()));
if (familyMap == null || familyMap.isEmpty())
continue;
NavigableMap<Long, byte[]> columnVersionsMap = familyMap.get(Bytes.toBytes(hbColumn.column()));
if (columnVersionsMap == null || columnVersionsMap.isEmpty())
continue;
Map.Entry<Long, byte[]> lastEntry = columnVersionsMap.lastEntry();
objectSetFieldValue(obj, field, lastEntry.getValue(), hbColumn.codecFlags());
} else if (hbColumn.isMultiVersioned()) {
NavigableMap<byte[], NavigableMap<Long, byte[]>> familyMap = map.get(Bytes.toBytes(hbColumn.family()));
if (familyMap == null || familyMap.isEmpty())
continue;
NavigableMap<Long, byte[]> columnVersionsMap = familyMap.get(Bytes.toBytes(hbColumn.column()));
objectSetFieldValue(obj, field, columnVersionsMap, hbColumn.codecFlags());
}
}
return obj;
}
private <R extends Serializable & Comparable<R>, T extends HBRecord<R>> boolean isFieldNull(Field field, HBRecord<R> obj) {
try {
field.setAccessible(true);
return field.get(obj) == null;
} catch (IllegalAccessException e) {
throw new ConversionFailedException("Field " + field.getName() + " could not be accessed", e);
}
}
/**
* Converts a {@link Serializable} object into a byte[]
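* <p>
* For example, serializing a {@code String} row key with no codec flags (a sketch):
* <pre>{@code
* byte[] rowKeyBytes = hbObjectMapper.valueToByteArray("emp#101", null);
* }</pre>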
*
* @param value Object to be serialized
* @param codecFlags Flags to be passed to Codec
* @param <R> Data type of row key
* @return Byte-array representing serialized object
*/
public <R extends Serializable & Comparable<R>> byte[] valueToByteArray(R value, Map<String, String> codecFlags) {
try {
try {
return codec.serialize(value, codecFlags);
} catch (SerializationException jpx) {
throw new ConversionFailedException("Don't know how to convert field to byte array", jpx);
}
} catch (IllegalArgumentException e) {
throw new BadHBaseLibStateException(e);
}
}
public <R extends Serializable & Comparable<R>> byte[] valueToByteArray(R value) {
return valueToByteArray(value, null);
}
/**
* Converts an object representing an HBase row key into HBase's {@link ImmutableBytesWritable}.
* This method is for use in Mappers, unit-tests for Mappers and unit-tests for Reducers.
*
* @param rowKey Row key object to be serialized
* @param <R> Data type of row key
* @return Byte array, wrapped in HBase's data type
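* <p>
* For example, emitting a key from a Mapper's {@code map()} method (a sketch; {@code context} and {@code someWritable} are assumed Hadoop types):
* <pre>{@code
* context.write(hbObjectMapper.rowKeyToIbw("emp#101"), someWritable);
* }</pre>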
*/
public <R extends Serializable & Comparable<R>> ImmutableBytesWritable rowKeyToIbw(R rowKey) {
return new ImmutableBytesWritable(valueToByteArray(rowKey));
}
private <R extends Serializable & Comparable<R>, T extends HBRecord<R>> void validateHBClass(Class<T> clazz) {
Constructor<?> constructor;
try {
Set<Pair<String, String>> columns = new HashSet<>();
constructor = clazz.getDeclaredConstructor();
int numOfHBColumns = 0, numOfHBRowKeys = 0;
for (Field field : clazz.getDeclaredFields()) {
if (field.isAnnotationPresent(HBRowKey.class)) {
numOfHBRowKeys++;
}
WrappedHBColumn hbColumn = new WrappedHBColumn(field);
if (hbColumn.isSingleVersioned()) {
validateHBColumnSingleVersionField(field);
numOfHBColumns++;
if (!columns.add(new Pair<>(hbColumn.family(), hbColumn.column()))) {
throw new FieldsMappedToSameColumnException(String.format("Class %s has two fields mapped to same column %s:%s", clazz.getName(), hbColumn.family(), hbColumn.column()));
}
} else if (hbColumn.isMultiVersioned()) {
validateHBColumnMultiVersionField(field);
numOfHBColumns++;
if (!columns.add(new Pair<>(hbColumn.family(), hbColumn.column()))) {
throw new FieldsMappedToSameColumnException(String.format("Class %s has two fields mapped to same column %s:%s", clazz.getName(), hbColumn.family(), hbColumn.column()));
}
}
}
if (numOfHBColumns == 0) {
throw new MissingHBColumnFieldsException(clazz);
}
if (numOfHBRowKeys == 0) {
throw new MissingHBRowKeyFieldsException(clazz);
}
} catch (NoSuchMethodException e) {
throw new NoEmptyConstructorException(String.format("Class %s needs to specify an empty constructor", clazz.getName()), e);
}
if (!Modifier.isPublic(constructor.getModifiers())) {
throw new EmptyConstructorInaccessibleException(String.format("Empty constructor of class %s is inaccessible", clazz.getName()));
}
}
/**
* Internal note: This should be in sync with {@link #getFieldType(Field, boolean)}
*/
private void validateHBColumnMultiVersionField(Field field) {
validationHBColumnField(field);
if (!(field.getGenericType() instanceof ParameterizedType)) {
throw new IncompatibleFieldForHBColumnMultiVersionAnnotationException("Field " + field + " is not even a parameterized type");
}
if (field.getType() != NavigableMap.class) {
throw new IncompatibleFieldForHBColumnMultiVersionAnnotationException("Field " + field + " is not a NavigableMap");
}
ParameterizedType pType = (ParameterizedType) field.getGenericType();
Type[] typeArguments = pType.getActualTypeArguments();
if (typeArguments.length != 2 || typeArguments[0] != Long.class) {
throw new IncompatibleFieldForHBColumnMultiVersionAnnotationException("Field " + field + " has unexpected type params (Key should be of " + Long.class.getName() + " type)");
}
if (!codec.canDeserialize(getFieldType(field, true))) {
throw new UnsupportedFieldTypeException(String.format("Field %s in class %s is of unsupported type Navigable ", field.getName(), field.getDeclaringClass().getName(), field.getDeclaringClass().getName()));
}
}
/**
* Internal note: For multi-version usecase, this should be in sync with {@link #validateHBColumnMultiVersionField(Field)}
*/
Type getFieldType(Field field, boolean isMultiVersioned) {
if (isMultiVersioned) {
return ((ParameterizedType) field.getGenericType()).getActualTypeArguments()[1];
} else {
return field.getGenericType();
}
}
private void validateHBColumnSingleVersionField(Field field) {
validationHBColumnField(field);
Type fieldType = getFieldType(field, false);
if (fieldType instanceof Class) {
Class<?> fieldClazz = (Class<?>) fieldType;
if (fieldClazz.isPrimitive()) {
throw new MappedColumnCantBePrimitiveException(String.format("Field %s in class %s is a primitive of type %s (Primitive data types are not supported as they're not nullable)", field.getName(), field.getDeclaringClass().getName(), fieldClazz.getName()));
}
}
if (!codec.canDeserialize(fieldType)) {
throw new UnsupportedFieldTypeException(String.format("Field %s in class %s is of unsupported type (%s)", field.getName(), field.getDeclaringClass().getName(), fieldType));
}
}
private void validationHBColumnField(Field field) {
@SuppressWarnings("unchecked")
WrappedHBColumn hbColumn = new WrappedHBColumn(field);
int modifiers = field.getModifiers();
if (Modifier.isTransient(modifiers)) {
throw new MappedColumnCantBeTransientException(field, hbColumn.getName());
}
if (Modifier.isStatic(modifiers)) {
throw new MappedColumnCantBeStaticException(field, hbColumn.getName());
}
}
private <R extends Serializable & Comparable<R>> NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> objToMap(HBRecord<R> obj) {
Class<? extends HBRecord> clazz = obj.getClass();
validateHBClass(clazz);
NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> map = new TreeMap<>(Bytes.BYTES_COMPARATOR);
int numOfFieldsToWrite = 0;
for (Field field : clazz.getDeclaredFields()) {
WrappedHBColumn hbColumn = new WrappedHBColumn(field);
boolean isRowKey = field.isAnnotationPresent(HBRowKey.class);
if (!hbColumn.isPresent() && !isRowKey)
continue;
if (isRowKey && isFieldNull(field, obj)) {
throw new HBRowKeyFieldCantBeNullException("Field " + field.getName() + " is null (fields part of row key cannot be null)");
}
if (hbColumn.isSingleVersioned()) {
byte[] family = Bytes.toBytes(hbColumn.family()), columnName = Bytes.toBytes(hbColumn.column());
if (!map.containsKey(family)) {
map.put(family, new TreeMap<byte[], NavigableMap<Long, byte[]>>(Bytes.BYTES_COMPARATOR));
}
Map<byte[], NavigableMap<Long, byte[]>> columns = map.get(family);
final byte[] fieldValueBytes = getFieldValueAsBytes(obj, field, hbColumn.codecFlags());
if (fieldValueBytes == null || fieldValueBytes.length == 0) {
continue;
}
numOfFieldsToWrite++;
columns.put(columnName, new TreeMap<Long, byte[]>() {
{
put(HConstants.LATEST_TIMESTAMP, fieldValueBytes);
}
});
} else if (hbColumn.isMultiVersioned()) {
NavigableMap<Long, byte[]> fieldValueVersions = getFieldValuesVersioned(field, obj, hbColumn.codecFlags());
if (fieldValueVersions == null)
continue;
byte[] family = Bytes.toBytes(hbColumn.family()), columnName = Bytes.toBytes(hbColumn.column());
if (!map.containsKey(family)) {
map.put(family, new TreeMap<byte[], NavigableMap<Long, byte[]>>(Bytes.BYTES_COMPARATOR));
}
Map<byte[], NavigableMap<Long, byte[]>> columns = map.get(family);
numOfFieldsToWrite++;
columns.put(columnName, fieldValueVersions);
}
}
if (numOfFieldsToWrite == 0) {
throw new AllHBColumnFieldsNullException();
}
return map;
}
private <R extends Serializable & Comparable<R>> byte[] getFieldValueAsBytes(HBRecord<R> obj, Field field, Map<String, String> codecFlags) {
R fieldValue;
try {
field.setAccessible(true);
fieldValue = (R) field.get(obj);
} catch (IllegalAccessException e) {
throw new BadHBaseLibStateException(e);
}
return valueToByteArray(fieldValue, codecFlags);
}
private <R extends Serializable & Comparable<R>> NavigableMap<Long, byte[]> getFieldValuesVersioned(Field field, HBRecord<R> obj, Map<String, String> codecFlags) {
try {
field.setAccessible(true);
@SuppressWarnings("unchecked")
NavigableMap<Long, R> fieldValueVersions = (NavigableMap<Long, R>) field.get(obj);
if (fieldValueVersions == null)
return null;
if (fieldValueVersions.size() == 0) {
throw new FieldAnnotatedWithHBColumnMultiVersionCantBeEmpty();
}
NavigableMap<Long, byte[]> output = new TreeMap<>();
for (Map.Entry<Long, R> e : fieldValueVersions.entrySet()) {
Long timestamp = e.getKey();
R fieldValue = e.getValue();
if (fieldValue == null)
continue;
byte[] fieldValueBytes = valueToByteArray(fieldValue, codecFlags);
output.put(timestamp, fieldValueBytes);
}
return output;
} catch (IllegalAccessException e) {
throw new BadHBaseLibStateException(e);
}
}
/**
* Converts an object of your bean-like class to HBase's {@link Put} object.
* This method is for use in a MapReduce job whose Reducer class extends HBase's org.apache.hadoop.hbase.mapreduce.TableReducer class (in other words, a MapReduce job whose output is an HBase table)
*
* @param obj An object of your bean-like class (one that implements {@link HBRecord} interface)
* @param <R> Data type of row key
* @return HBase's {@link Put} object
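* <p>
* Typical use inside a TableReducer's {@code reduce()} method (a sketch; {@code employee} is an instance of a hypothetical class implementing {@link HBRecord}):
* <pre>{@code
* Put put = hbObjectMapper.writeValueAsPut(employee);
* context.write(hbObjectMapper.getRowKey(employee), put);
* }</pre>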
*/
public <R extends Serializable & Comparable<R>> Put writeValueAsPut(HBRecord<R> obj) {
Put put = new Put(composeRowKey(obj));
for (Map.Entry<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> fe : objToMap(obj).entrySet()) {
byte[] family = fe.getKey();
for (Map.Entry<byte[], NavigableMap<Long, byte[]>> e : fe.getValue().entrySet()) {
byte[] columnName = e.getKey();
NavigableMap<Long, byte[]> columnValuesVersioned = e.getValue();
if (columnValuesVersioned == null)
continue;
for (Map.Entry<Long, byte[]> versionAndValue : columnValuesVersioned.entrySet()) {
put.add(family, columnName, versionAndValue.getKey(), versionAndValue.getValue());
}
}
}
return put;
}
/**
* A bulk version of {@link #writeValueAsPut(HBRecord)} method
*
* @param objects List of objects of your bean-like class (of type that extends {@link HBRecord})
* @param <R> Data type of row key
* @return List of HBase's {@link Put} objects
*/
public <R extends Serializable & Comparable<R>> List<Put> writeValueAsPut(List<HBRecord<R>> objects) {
List<Put> puts = new ArrayList<>(objects.size());
for (HBRecord<R> obj : objects) {
Put put = writeValueAsPut(obj);
puts.add(put);
}
return puts;
}
/**
* Converts an object of your bean-like class to HBase's {@link Result} object.
* This method is for use in unit-tests of a MapReduce job whose Mapper class extends org.apache.hadoop.hbase.mapreduce.TableMapper class (in other words, a MapReduce job whose input is an HBase table)
*
* @param obj object of your bean-like class (of type that extends {@link HBRecord})
* @param <R> Data type of row key
* @return HBase's {@link Result} object
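* <p>
* Typical use in a Mapper unit-test (a sketch; {@code mapDriver} is an MRUnit {@code MapDriver} and {@code employee} is a hypothetical entity instance):
* <pre>{@code
* Result result = hbObjectMapper.writeValueAsResult(employee);
* mapDriver.withInput(hbObjectMapper.getRowKey(employee), result).runTest();
* }</pre>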
*/
public <R extends Serializable & Comparable<R>> Result writeValueAsResult(HBRecord<R> obj) {
byte[] row = composeRowKey(obj);
List<Cell> cellList = new ArrayList<>();
for (Map.Entry<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> fe : objToMap(obj).entrySet()) {
byte[] family = fe.getKey();
for (Map.Entry<byte[], NavigableMap<Long, byte[]>> e : fe.getValue().entrySet()) {
byte[] columnName = e.getKey();
NavigableMap<Long, byte[]> valuesVersioned = e.getValue();
if (valuesVersioned == null)
continue;
for (Map.Entry<Long, byte[]> columnVersion : valuesVersioned.entrySet()) {
cellList.add(new KeyValue(row, family, columnName, columnVersion.getKey(), columnVersion.getValue()));
}
}
}
return Result.create(cellList);
}
/**
* A bulk version of {@link #writeValueAsResult(HBRecord)} method
*
* @param objects List of objects of your bean-like class (of type that extends {@link HBRecord})
* @param <R> Data type of row key
* @return List of HBase's {@link Result} objects
*/
public <R extends Serializable & Comparable<R>> List<Result> writeValueAsResult(List<HBRecord<R>> objects) {
List<Result> results = new ArrayList<>(objects.size());
for (HBRecord<R> obj : objects) {
Result result = writeValueAsResult(obj);
results.add(result);
}
return results;
}
/**
* Converts HBase's {@link Result} object to an object of your bean-like class.
* This method is for use in a MapReduce job whose Mapper class extends org.apache.hadoop.hbase.mapreduce.TableMapper class (in other words, a MapReduce job whose input is an HBase table)
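* <p>
* Typical use inside a TableMapper's {@code map()} method (a sketch; {@code Employee} is a hypothetical entity class):
* <pre>{@code
* Employee employee = hbObjectMapper.readValue(key, value, Employee.class);
* }</pre>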
*
* @param rowKey Row key of the record that corresponds to {@link Result}. If this is null, an attempt will be made to resolve it from {@link Result}
* @param result HBase's {@link Result} object
* @param clazz {@link Class} to which you want to convert to (must implement {@link HBRecord} interface)
* @param <R> Data type of row key
* @param <T> Entity type
* @return Bean-like object
* @throws DeserializationException One or more column values is a byte[] that couldn't be deserialized into field type (as defined in your entity class)
*/
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValue(ImmutableBytesWritable rowKey, Result result, Class<T> clazz) throws DeserializationException {
if (rowKey == null)
return readValueFromResult(result, clazz);
else
return readValueFromRowAndResult(rowKey.get(), result, clazz);
}
/**
* A compact version of {@link #readValue(ImmutableBytesWritable, Result, Class)} method
*
* @param result HBase's {@link Result} object
* @param clazz {@link Class} to which you want to convert to (must implement {@link HBRecord} interface)
* @param <R> Data type of row key
* @param <T> Entity type
* @return Bean-like object
* @throws DeserializationException One or more column values is a byte[] that couldn't be deserialized into field type (as defined in your entity class)
*/
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValue(Result result, Class<T> clazz) throws DeserializationException {
return readValueFromResult(result, clazz);
}
<R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValue(R rowKey, Result result, Class<T> clazz) throws DeserializationException {
if (rowKey == null)
return readValueFromResult(result, clazz);
else
return readValueFromRowAndResult(rowKeyToBytes(rowKey), result, clazz);
}
private boolean isResultEmpty(Result result) {
return result == null || result.isEmpty() || result.getRow() == null || result.getRow().length == 0;
}
private <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValueFromResult(Result result, Class<T> clazz) throws DeserializationException {
if (isResultEmpty(result)) return null;
return mapToObj(result.getRow(), result.getMap(), clazz);
}
private <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValueFromRowAndResult(byte[] rowKey, Result result, Class<T> clazz) throws DeserializationException {
if (isResultEmpty(result)) return null;
return mapToObj(rowKey, result.getMap(), clazz);
}
private void objectSetFieldValue(Object obj, Field field, NavigableMap<Long, byte[]> columnValuesVersioned, Map<String, String> codecFlags) {
if (columnValuesVersioned == null)
return;
try {
field.setAccessible(true);
NavigableMap<Long, Object> columnValuesVersionedBoxed = new TreeMap<>();
for (Map.Entry<Long, byte[]> versionAndValue : columnValuesVersioned.entrySet()) {
columnValuesVersionedBoxed.put(versionAndValue.getKey(), byteArrayToValue(versionAndValue.getValue(), ((ParameterizedType) field.getGenericType()).getActualTypeArguments()[1], codecFlags));
}
field.set(obj, columnValuesVersionedBoxed);
} catch (Exception ex) {
throw new ConversionFailedException("Could not set value on field \"" + field.getName() + "\" on instance of class " + obj.getClass(), ex);
}
}
private void objectSetFieldValue(Object obj, Field field, byte[] value, Map<String, String> codecFlags) {
if (value == null || value.length == 0)
return;
try {
field.setAccessible(true);
field.set(obj, byteArrayToValue(value, field.getGenericType(), codecFlags));
} catch (Exception ex) {
throw new ConversionFailedException("Could not set value on field \"" + field.getName() + "\" on instance of class " + obj.getClass(), ex);
}
}
/**
* Convert a byte array representing HBase column data to appropriate data type (boxed as object)
*/
Object byteArrayToValue(byte[] value, Type type, Map<String, String> codecFlags) throws DeserializationException {
if (value == null || value.length == 0)
return null;
else
return codec.deserialize(value, type, codecFlags);
}
/**
* Converts HBase's {@link Put} object to an object of your bean-like class
* This method is for use in unit-tests of a MapReduce job whose Reducer class extends org.apache.hadoop.hbase.mapreduce.TableReducer class (in other words, a MapReduce job whose output is an HBase table)
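* <p>
* Typical use in a Reducer unit-test, converting the {@link Put} produced by the reducer under test back into a bean for assertions (a sketch; {@code Employee} is a hypothetical entity class):
* <pre>{@code
* Employee written = hbObjectMapper.readValue(rowKey, put, Employee.class);
* }</pre>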
*
* @param rowKey Row key of the record that corresponds to {@link Put}. If this is null, an attempt will be made to resolve it from {@link Put} object
* @param put HBase's {@link Put} object
* @param clazz {@link Class} to which you want to convert to (must implement {@link HBRecord} interface)
* @param <R> Data type of row key
* @param <T> Entity type
* @return Bean-like object
* @throws DeserializationException One or more column values is a byte[] that couldn't be deserialized into field type (as defined in your entity class)
*/
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValue(ImmutableBytesWritable rowKey, Put put, Class<T> clazz) throws DeserializationException {
if (rowKey == null)
return readValueFromPut(put, clazz);
else
return readValueFromRowAndPut(rowKey.get(), put, clazz);
}
/**
* A variant of {@link #readValue(ImmutableBytesWritable, Put, Class)} method
*
* @param rowKey Row key of the record that corresponds to {@link Put}. If this is null, an attempt will be made to resolve it from {@link Put} object
* @param put HBase's {@link Put} object
* @param clazz {@link Class} to which you want to convert to (must implement {@link HBRecord} interface)
* @param <R> Data type of row key
* @param <T> Entity type
* @return Bean-like object
* @throws DeserializationException One or more column values is a byte[] that couldn't be deserialized into field type (as defined in your entity class)
*/
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValue(String rowKey, Put put, Class<T> clazz) throws DeserializationException {
if (rowKey == null)
return readValueFromPut(put, clazz);
else
return readValueFromRowAndPut(Bytes.toBytes(rowKey), put, clazz);
}
private <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValueFromRowAndPut(byte[] rowKey, Put put, Class<T> clazz) throws DeserializationException {
Map<byte[], List<Cell>> rawMap = put.getFamilyCellMap();
NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> map = new TreeMap<>(Bytes.BYTES_COMPARATOR);
for (Map.Entry<byte[], List<Cell>> familyNameAndColumnValues : rawMap.entrySet()) {
byte[] family = familyNameAndColumnValues.getKey();
if (!map.containsKey(family)) {
map.put(family, new TreeMap<byte[], NavigableMap<Long, byte[]>>(Bytes.BYTES_COMPARATOR));
}
List<Cell> cellList = familyNameAndColumnValues.getValue();
for (Cell cell : cellList) {
byte[] column = CellUtil.cloneQualifier(cell);
if (!map.get(family).containsKey(column)) {
map.get(family).put(column, new TreeMap<Long, byte[]>());
}
map.get(family).get(column).put(cell.getTimestamp(), CellUtil.cloneValue(cell));
}
}
return mapToObj(rowKey, map, clazz);
}
private <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValueFromPut(Put put, Class<T> clazz) throws DeserializationException {
if (put == null || put.isEmpty() || put.getRow() == null || put.getRow().length == 0) {
return null;
}
return readValueFromRowAndPut(put.getRow(), put, clazz);
}
/**
* A compact version of {@link #readValue(ImmutableBytesWritable, Put, Class)} method
*
* @param put HBase's {@link Put} object
* @param clazz {@link Class} to which you want to convert to (must implement {@link HBRecord} interface)
* @param <R> Data type of row key
* @param <T> Entity type
* @return Bean-like object
* @throws DeserializationException One or more column values is a byte[] that couldn't be deserialized into field type (as defined in your entity class)
*/
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> T readValue(Put put, Class<T> clazz) throws DeserializationException {
return readValueFromPut(put, clazz);
}
/**
* Get row key (for use in HBase) from a bean-like object.
* For use in:
*
* - reducer jobs that extend HBase's org.apache.hadoop.hbase.mapreduce.TableReducer class
* - unit tests for mapper jobs that extend HBase's org.apache.hadoop.hbase.mapreduce.TableMapper class
*
* @param obj object of your bean-like class (of type that extends {@link HBRecord})
* @param <R> Data type of row key
* @return Row key
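* <p>
* For example, asserting a Reducer's output key in a unit-test (a sketch; {@code reduceDriver} is an MRUnit {@code ReduceDriver} and {@code employee} is a hypothetical entity instance):
* <pre>{@code
* reduceDriver.withOutput(hbObjectMapper.getRowKey(employee), expectedPut).runTest();
* }</pre>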
*/
public <R extends Serializable & Comparable<R>> ImmutableBytesWritable getRowKey(HBRecord<R> obj) {
if (obj == null) {
throw new NullPointerException("Cannot compose row key for null objects");
}
return new ImmutableBytesWritable(composeRowKey(obj));
}
private <R extends Serializable & Comparable<R>> byte[] composeRowKey(HBRecord<R> obj) {
R rowKey;
try {
rowKey = obj.composeRowKey();
} catch (Exception ex) {
throw new RowKeyCantBeComposedException(ex);
}
if (rowKey == null || rowKey.toString().isEmpty()) {
throw new RowKeyCantBeEmptyException();
}
return valueToByteArray(rowKey, null);
}
/**
* Get list of column families mapped in definition of your bean-like class
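* <p>
* For example (a sketch; {@code Employee} is a hypothetical entity class):
* <pre>{@code
* Set<String> families = hbObjectMapper.getColumnFamilies(Employee.class);
* }</pre>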
*
* @param clazz {@link Class} that you're reading (must implement {@link HBRecord} interface)
* @param <R> Data type of row key
* @param <T> Entity type
* @return Set of column families used in the input class
*/
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> Set<String> getColumnFamilies(Class<T> clazz) {
validateHBClass(clazz);
Set<String> columnFamilySet = new HashSet<>();
for (Field field : clazz.getDeclaredFields()) {
WrappedHBColumn hbColumn = new WrappedHBColumn(field);
if (hbColumn.isPresent())
columnFamilySet.add(hbColumn.family());
}
return columnFamilySet;
}
/**
* Checks whether input class can be converted to HBase data types and vice-versa
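* <p>
* For example, failing fast at application startup (a sketch; {@code Employee} is a hypothetical entity class):
* <pre>{@code
* if (!hbObjectMapper.isValid(Employee.class)) {
*     throw new IllegalStateException("Employee is not a valid HBase-mapped class");
* }
* }</pre>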
*
* @param clazz {@link Class} you intend to validate (must implement {@link HBRecord} interface)
* @param <R> Data type of row key
* @param <T> Entity type
* @return true if the class is valid, false otherwise
*/
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> boolean isValid(Class<T> clazz) {
try {
validateHBClass(clazz);
return true;
} catch (Exception ex) {
return false;
}
}
/**
* For your bean-like {@link Class}, get all fields mapped to HBase columns
*
* @param clazz Bean-like {@link Class} (must implement {@link HBRecord} interface) whose fields you intend to read
* @param <R> Data type of row key
* @param <T> Entity type
* @return A {@link Map} with keys as field names and values as instances of {@link Field}
*/
public <R extends Serializable & Comparable<R>, T extends HBRecord<R>> Map<String, Field> getHBFields(Class<T> clazz) {
validateHBClass(clazz);
Map<String, Field> mappings = new HashMap<>();
for (Field field : clazz.getDeclaredFields()) {
if (new WrappedHBColumn(field).isPresent())
mappings.put(field.getName(), field);
}
return mappings;
}
}