com.netflix.astyanax.entitystore.CompositeEntityManager Maven / Gradle / Ivy
The newest version!
package com.netflix.astyanax.entitystore;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.persistence.PersistenceException;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.MutationBatchManager;
import com.netflix.astyanax.ThreadLocalMutationBatchManager;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.ConsistencyLevel;
import com.netflix.astyanax.model.CqlResult;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.model.Rows;
import com.netflix.astyanax.query.ColumnFamilyQuery;
import com.netflix.astyanax.query.RowSliceQuery;
import com.netflix.astyanax.recipes.reader.AllRowsReader;
import com.netflix.astyanax.retry.RetryPolicy;
import com.netflix.astyanax.serializers.ByteBufferSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.util.RangeBuilder;
/**
 * Entity manager for a composite column family. This entity manager expects
 * the entity to have a single @Id which corresponds to the row key. It will then
 * have at least 3 columns, with all but the last being parts of the composite.
 *
 * <pre>
 * @Entity
 * class MyEntity {
 *     @Id     String rowKey;
 *     @Column String firstCompositePart;
 *     @Column Long   secondCompositePart;
 *     @Column String valuePart;
 * }
 * </pre>
 *
 * @author elandau
 *
 * @param <T> Entity type
 * @param <K> Partition (row) key type
 */
public class CompositeEntityManager implements EntityManager {
private static final Logger LOG = LoggerFactory.getLogger(CompositeEntityManager.class);
private static final ConsistencyLevel DEFAULT_CONSISTENCY_LEVEL = ConsistencyLevel.CL_ONE;
public static class Builder {
private Keyspace keyspace;
private Class clazz;
private ColumnFamily columnFamily = null;
private ConsistencyLevel readConsitency = DEFAULT_CONSISTENCY_LEVEL;
private ConsistencyLevel writeConsistency = DEFAULT_CONSISTENCY_LEVEL;
private CompositeEntityMapper entityMapper;
private Integer ttl = null;
private RetryPolicy retryPolicy = null;
private LifecycleEvents lifecycleHandler = null;
private String columnFamilyName = null;
private boolean autoCommit = true;
private MutationBatchManager batchManager = null;
private boolean verbose = false;
private ByteBuffer prefix = null;
/**
* mandatory
* @param clazz entity class type
*/
public Builder withEntityType(Class clazz) {
Preconditions.checkNotNull(clazz);
this.clazz = clazz;
return this;
}
/**
* mandatory
* @param keyspace
*/
public Builder withKeyspace(Keyspace keyspace) {
Preconditions.checkNotNull(keyspace);
this.keyspace = keyspace;
return this;
}
/**
* optional
* @param columnFamilyName Name of column family to use.
*/
public Builder withColumnFamily(String columnFamilyName) {
Preconditions.checkState(this.columnFamilyName == null && columnFamily == null , "withColumnFamily called multiple times");
Preconditions.checkNotNull(columnFamilyName);
this.columnFamilyName = columnFamilyName; // .toLowerCase();
return this;
}
/**
* optional
* @param level
*/
public Builder withReadConsistency(ConsistencyLevel level) {
Preconditions.checkNotNull(level);
this.readConsitency = level;
return this;
}
/**
* optional
* @param level
*/
public Builder withWriteConsistency(ConsistencyLevel level) {
Preconditions.checkNotNull(level);
this.writeConsistency = level;
return this;
}
/**
* set both read and write consistency
* optional
* @param level
*/
public Builder withConsistency(ConsistencyLevel level) {
Preconditions.checkNotNull(level);
this.readConsitency = level;
this.writeConsistency = level;
return this;
}
/**
* default TTL for all columns written to cassandra
* optional
* @return
*/
public Builder withTTL(Integer ttl) {
this.ttl = ttl;
return this;
}
/**
* optional
* @param level
*/
public Builder withRetryPolicy(RetryPolicy policy) {
Preconditions.checkNotNull(policy);
this.retryPolicy = policy;
return this;
}
/**
* If set to false
* @param autoCommit
* @return
*/
public Builder withAutoCommit(boolean autoCommit) {
Preconditions.checkArgument(autoCommit == false && this.batchManager == null, "Cannot use autoCommit with an externally supplied MutationBatchManager");
this.autoCommit = autoCommit;
return this;
}
/**
* If set to true log every action
* @param verbose
* @return
*/
public Builder withVerboseTracing(boolean verbose) {
this.verbose = verbose;
return this;
}
/**
* Specify a mutation manager to use. The mutation manager makes it possible to share
* the same mutation across multiple calls to multiple entity managers and only
* commit when all the mutations has been created.
* @param batchManager
* @return
*/
public Builder withMutationBatchManager(MutationBatchManager batchManager) {
this.batchManager = batchManager;
this.autoCommit = false;
return this;
}
public Builder withKeyPrefix(String prefix) {
this.prefix = StringSerializer.get().toByteBuffer(prefix);
return this;
}
@SuppressWarnings("unchecked")
public CompositeEntityManager build() {
// check mandatory fields
Preconditions.checkNotNull(clazz, "withEntityType(...) is not set");
Preconditions.checkNotNull(keyspace, "withKeyspace(...) is not set");
// TODO: check @Id type compatibility
// TODO: do we need to require @Entity annotation
this.entityMapper = new CompositeEntityMapper(clazz, ttl, prefix);
this.lifecycleHandler = new LifecycleEvents(clazz);
if (columnFamily == null) {
if (columnFamilyName == null)
columnFamilyName = entityMapper.getEntityName();
columnFamily = new ColumnFamily(
columnFamilyName,
(com.netflix.astyanax.Serializer)MappingUtils.getSerializerForField(this.entityMapper.getId()),
ByteBufferSerializer.get());
}
if (batchManager == null) {
batchManager = new ThreadLocalMutationBatchManager(this.keyspace, this.writeConsistency, this.retryPolicy);
}
// build object
return new CompositeEntityManager(this);
}
}
public static Builder builder() {
return new Builder();
}
private final Keyspace keyspace;
private final CompositeEntityMapper entityMapper;
private final RetryPolicy retryPolicy;
private final LifecycleEvents lifecycleHandler;
private final boolean autoCommit;
private final ColumnFamily columnFamily;
private final ConsistencyLevel readConsitency;
private final MutationBatchManager batchManager;
private final boolean verbose;
public CompositeEntityManager(Builder builder) {
entityMapper = builder.entityMapper;
keyspace = builder.keyspace;
columnFamily = builder.columnFamily;
readConsitency = builder.readConsitency;
retryPolicy = builder.retryPolicy;
lifecycleHandler = builder.lifecycleHandler;
autoCommit = builder.autoCommit;
batchManager = builder.batchManager;
verbose = builder.verbose;
}
//////////////////////////////////////////////////////////////////
// public APIs
/**
* @inheritDoc
*/
public void put(T entity) throws PersistenceException {
try {
if (verbose)
LOG.info(String.format("%s : Adding entity '%s'", columnFamily.getName(), entity));
lifecycleHandler.onPrePersist(entity);
MutationBatch mb = getMutationBatch();
entityMapper.fillMutationBatch(mb, columnFamily, entity);
if (autoCommit)
mb.execute();
lifecycleHandler.onPostPersist(entity);
} catch(Exception e) {
throw new PersistenceException("failed to put entity ", e);
}
}
/**
* @inheritDoc
*/
public T get(K id) throws PersistenceException {
throw new UnsupportedOperationException("Call newNativeQuery().withId().equal({id}) instead");
}
/**
* @inheritDoc
*/
@Override
public void delete(K id) throws PersistenceException {
try {
if (verbose)
LOG.info(String.format("%s : Deleting id '%s'", columnFamily.getName(), id));
MutationBatch mb = getMutationBatch();
mb.withRow(columnFamily, id).delete();
if (autoCommit)
mb.execute();
} catch(Exception e) {
throw new PersistenceException("failed to delete entity " + id, e);
}
}
@Override
public void remove(T entity) throws PersistenceException {
K id = null;
try {
if (verbose)
LOG.info(String.format("%s : Removing entity '%s'", columnFamily.getName(), entity));
lifecycleHandler.onPreRemove(entity);
id = entityMapper.getEntityId(entity);
MutationBatch mb = getMutationBatch();
entityMapper.fillMutationBatchForDelete(mb, columnFamily, entity);
if (autoCommit)
mb.execute();
lifecycleHandler.onPostRemove(entity);
} catch(Exception e) {
throw new PersistenceException("failed to delete entity " + id, e);
}
}
/**
* @inheritDoc
*/
@Override
public List getAll() throws PersistenceException {
final List entities = Lists.newArrayList();
visitAll(new Function() {
@Override
public synchronized Boolean apply(T entity) {
entities.add(entity);
try {
lifecycleHandler.onPostLoad(entity);
} catch (Exception e) {
// TODO
}
return true;
}
});
return entities;
}
/**
* @inheritDoc
*/
@Override
public List get(Collection ids) throws PersistenceException {
try {
if (verbose)
LOG.info(String.format("%s : Reading entities '%s'", columnFamily.getName(), ids.toString()));
// Query for rows
ColumnFamilyQuery cfq = newQuery();
return convertRowsToEntities(cfq.getRowSlice(ids).execute().getResult());
} catch(Exception e) {
throw new PersistenceException("failed to get entities " + ids, e);
}
}
private List convertRowsToEntities(Rows rows) throws Exception {
List entities = Lists.newArrayList();
for (Row row : rows) {
ColumnList cl = row.getColumns();
// when a row is deleted in cassandra,
// the row key remains (without any columns) until the next compaction.
// simply return null (as non exist)
if (!cl.isEmpty()) {
for (Column column : cl) {
T entity = entityMapper.constructEntity(row.getKey(), column);
lifecycleHandler.onPostLoad(entity);
entities.add(entity);
}
}
}
return entities;
}
/**
* @inheritDoc
*/
@Override
public void delete(Collection ids) throws PersistenceException {
MutationBatch mb = getMutationBatch();
try {
if (verbose)
LOG.info(String.format("%s : Delete ids '%s'", columnFamily.getName(), ids.toString()));
for (K id : ids) {
mb.withRow(columnFamily, id).delete();
}
if (autoCommit)
mb.execute();
} catch(Exception e) {
throw new PersistenceException("failed to delete entities " + ids, e);
}
}
@Override
public void remove(Collection entities) throws PersistenceException {
MutationBatch mb = getMutationBatch();
try {
for (T entity : entities) {
lifecycleHandler.onPreRemove(entity);
if (verbose)
LOG.info(String.format("%s : Deleting '%s'", columnFamily.getName(), entity));
entityMapper.fillMutationBatchForDelete(mb, columnFamily, entity);
}
mb.execute();
for (T entity : entities) {
lifecycleHandler.onPostRemove(entity);
}
} catch(Exception e) {
throw new PersistenceException("failed to delete entities ", e);
}
}
/**
* @inheritDoc
*/
@Override
public void put(Collection entities) throws PersistenceException {
MutationBatch mb = getMutationBatch();
try {
for (T entity : entities) {
lifecycleHandler.onPrePersist(entity);
if (verbose)
LOG.info(String.format("%s : Writing '%s'", columnFamily.getName(), entity));
entityMapper.fillMutationBatch(mb, columnFamily, entity);
}
if (autoCommit)
mb.execute();
for (T entity : entities) {
lifecycleHandler.onPostPersist(entity);
}
} catch(Exception e) {
throw new PersistenceException("failed to put entities ", e);
}
}
/**
* @inheritDoc
*/
@Override
public void visitAll(final Function callback) throws PersistenceException {
try {
new AllRowsReader.Builder(keyspace, columnFamily)
.withIncludeEmptyRows(false)
.forEachRow(new Function, Boolean>() {
@Override
public Boolean apply(Row row) {
if (row.getColumns().isEmpty())
return true;
for (Column column : row.getColumns()) {
T entity = (T) entityMapper.constructEntity(row.getKey(), column);
try {
lifecycleHandler.onPostLoad(entity);
} catch (Exception e) {
// TODO:
}
if (!callback.apply(entity))
return false;
}
return true;
}
})
.build()
.call();
} catch (Exception e) {
throw new PersistenceException("Failed to fetch all entites", e);
}
}
@Override
public List find(String cql) throws PersistenceException {
Preconditions.checkArgument(StringUtils.left(cql, 6).equalsIgnoreCase("SELECT"), "CQL must be SELECT statement");
try {
CqlResult results = newQuery().withCql(cql).execute().getResult();
List entities = Lists.newArrayListWithExpectedSize(results.getRows().size());
for (Row row : results.getRows()) {
if (!row.getColumns().isEmpty()) {
T entity = entityMapper.constructEntityFromCql(row.getColumns());
lifecycleHandler.onPostLoad(entity);
entities.add(entity);
}
}
return entities;
} catch (Exception e) {
throw new PersistenceException("Failed to execute cql query", e);
}
}
private MutationBatch getMutationBatch() {
return batchManager.getSharedMutationBatch();
}
private ColumnFamilyQuery newQuery() {
ColumnFamilyQuery cfq = keyspace.prepareQuery(columnFamily);
if(readConsitency != null)
cfq.setConsistencyLevel(readConsitency);
if(retryPolicy != null)
cfq.withRetryPolicy(retryPolicy);
return cfq;
}
@Override
public void createStorage(Map options) throws PersistenceException {
try {
Properties props = new Properties();
props.put("key_validation_class", this.entityMapper.getKeyType());
props.put("default_validation_class", this.entityMapper.getValueType());
props.put("comparator_type", this.entityMapper.getComparatorType());
props.put("name", this.columnFamily.getName());
LOG.info("Creating column family : " + props.toString());
keyspace.createColumnFamily(props);
} catch (ConnectionException e) {
throw new PersistenceException("Unable to create column family " + this.columnFamily.getName(), e);
}
}
@Override
public void deleteStorage() throws PersistenceException {
try {
LOG.info(String.format("%s : Deleting storage", columnFamily.getName()));
keyspace.dropColumnFamily(this.columnFamily);
} catch (ConnectionException e) {
throw new PersistenceException("Unable to drop column family " + this.columnFamily.getName(), e);
}
}
@Override
public void truncate() throws PersistenceException {
try {
LOG.info(String.format("%s : Truncating", columnFamily.getName()));
keyspace.truncateColumnFamily(this.columnFamily);
} catch (ConnectionException e) {
throw new PersistenceException("Unable to drop column family " + this.columnFamily.getName(), e);
}
}
@Override
public void commit() throws PersistenceException {
if (verbose)
LOG.info(String.format("%s : Commit mutation", columnFamily.getName()));
MutationBatch mb = getMutationBatch();
if (mb != null) {
try {
mb.execute();
} catch (ConnectionException e) {
throw new PersistenceException("Failed to commit mutation batch", e);
}
}
else {
if (verbose)
LOG.info(String.format("%s : Nothing to commit", columnFamily.getName()));
}
}
@Override
public NativeQuery createNativeQuery() {
return new NativeQuery() {
@Override
public T getSingleResult() throws PersistenceException {
return Iterables.getFirst(getResultSet(), null);
}
@Override
public Collection getResultSet() throws PersistenceException {
Preconditions.checkArgument(!ids.isEmpty(), "Must specify at least one row key (ID) to fetch");
// if (verbose)
// LOG.info(String.format("%s : Query ids '%s' with predicates '%s'", columnFamily.getName(), ids, predicates));
RowSliceQuery rowQuery = prepareQuery();
try {
List entities = convertRowsToEntities(rowQuery.execute().getResult());
// if (verbose)
// LOG.info(String.format("%s : Query ids '%s' with predicates '%s' result='%s'", columnFamily.getName(), ids, predicates, entities));
return entities;
} catch (Exception e) {
throw new PersistenceException("Error executing query", e);
}
}
@Override
public Map> getResultSetById() throws Exception {
Map> result = Maps.newLinkedHashMap();
for (T entity : getResultSet()) {
K id = (K)entityMapper.idMapper.getValue(entity);
Collection children = result.get(id);
if (children == null) {
children = Lists.newArrayListWithCapacity(1);
result.put(id, children);
}
children.add(entity);
}
return result;
}
@Override
public Map getResultSetCounts() throws Exception {
Preconditions.checkArgument(!ids.isEmpty(), "Must specify at least one row key (ID) to fetch");
// if (verbose)
// LOG.info(String.format("%s : Query ids '%s' with predicates '%s'", columnFamily.getName(), ids, predicates));
RowSliceQuery rowQuery = prepareQuery();
try {
Map counts = rowQuery.getColumnCounts().execute().getResult();
// if (verbose)
// LOG.info(String.format("%s : Query ids '%s' with predicates '%s' result='%s'", columnFamily.getName(), ids, predicates, counts));
return counts;
} catch (Exception e) {
throw new PersistenceException("Error executing query", e);
}
}
private RowSliceQuery prepareQuery() {
RowSliceQuery rowQuery = keyspace.prepareQuery(columnFamily).setConsistencyLevel(readConsitency)
.getRowSlice(ids);
if (predicates != null && !predicates.isEmpty()) {
ByteBuffer[] endpoints = entityMapper.getQueryEndpoints(predicates);
rowQuery = rowQuery.withColumnRange(
new RangeBuilder()
.setStart(endpoints[0])
.setEnd(endpoints[1])
.setLimit(columnLimit)
.build());
}
return rowQuery;
}
};
}
}