package info.archinnov.achilles.dao;

import static info.archinnov.achilles.logger.ThriftLoggerHelper.format;
import static me.prettyprint.hector.api.factory.HFactory.*;
import info.archinnov.achilles.consistency.AchillesConsistencyLevelPolicy;
import info.archinnov.achilles.context.execution.SafeExecutionContext;
import info.archinnov.achilles.counter.AchillesCounter;
import info.archinnov.achilles.entity.metadata.PropertyMeta;
import info.archinnov.achilles.iterator.ThriftCounterSliceIterator;
import info.archinnov.achilles.iterator.ThriftJoinSliceIterator;
import info.archinnov.achilles.iterator.ThriftSliceIterator;
import info.archinnov.achilles.serializer.ThriftSerializerTypeInferer;
import info.archinnov.achilles.serializer.ThriftSerializerUtils;
import org.apache.cassandra.utils.Pair;
import info.archinnov.achilles.validation.Validator;
import java.util.List;
import me.prettyprint.cassandra.model.HCounterColumnImpl;
import me.prettyprint.cassandra.model.thrift.ThriftCounterColumnQuery;
import me.prettyprint.hector.api.Cluster;
import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.Serializer;
import me.prettyprint.hector.api.beans.Composite;
import me.prettyprint.hector.api.beans.HColumn;
import me.prettyprint.hector.api.beans.HCounterColumn;
import me.prettyprint.hector.api.beans.Rows;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.mutation.Mutator;
import me.prettyprint.hector.api.query.CounterQuery;
import me.prettyprint.hector.api.query.SliceCounterQuery;
import me.prettyprint.hector.api.query.SliceQuery;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;

/**
 * ThriftAbstractDao
 * 
 * @author DuyHai DOAN
 * 
 */
public abstract class ThriftAbstractDao
{
    public static final String LOGGER_NAME = "ACHILLES_DAO";
    private static final Logger log = LoggerFactory.getLogger(LOGGER_NAME);

    protected Keyspace keyspace;
    protected Cluster cluster;
    protected Serializer<Composite> columnNameSerializer;
    protected String columnFamily;
    protected AchillesConsistencyLevelPolicy policy;
    protected Pair<Class<?>, Class<?>> rowkeyAndValueClasses;

    public static int DEFAULT_LENGTH = 100;

    protected ThriftAbstractDao() {
    }

    protected ThriftAbstractDao(Cluster cluster, Keyspace keyspace, String cf,
            AchillesConsistencyLevelPolicy policy,
            Pair<Class<?>, Class<?>> rowkeyAndValueClasses)
    {
        Validator.validateNotNull(cluster, "Cluster should not be null");
        Validator.validateNotNull(keyspace, "keyspace should not be null");
        Validator.validateNotNull(keyspace, "policy should not be null");
        this.cluster = cluster;
        this.keyspace = keyspace;
        this.columnFamily = cf;
        this.policy = policy;
        this.rowkeyAndValueClasses = rowkeyAndValueClasses;
    }

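    /**
     * Executes the given context and always restores the default consistency levels
     * afterwards, even if the execution throws.
     */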
    private <T> T reinitConsistencyLevels(SafeExecutionContext<T> context)
    {
        log.trace("Execute safely and reinit consistency level in thread {}",
                Thread.currentThread());
        try
        {
            return context.execute();
        } finally
        {
            this.policy.reinitDefaultConsistencyLevels();
        }
    }

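    /**
     * Guava Function extracting the value of an HColumn, for use with Lists.transform().
     */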
    protected <V> Function<HColumn<Composite, V>, V> getHColumnToValueFn()
    {
        return new Function<HColumn<Composite, V>, V>()
        {
            @Override
            public V apply(HColumn<Composite, V> hColumn)
            {
                return hColumn.getValue();
            }
        };
    }

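    /**
     * Guava Function turning an HColumn into a (name, value) Pair.
     */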
    private <V> Function<HColumn<Composite, V>, Pair<Composite, V>> getHColumnToPairFn()
    {
        return new Function<HColumn<Composite, V>, Pair<Composite, V>>()
        {
            @Override
            public Pair<Composite, V> apply(HColumn<Composite, V> hColumn)
            {
                return Pair.create(hColumn.getName(), hColumn.getValue());
            }
        };
    }

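    /**
     * Queues a column insertion on the given mutator, with an optional TTL in seconds.
     */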
    public <K, V> void insertColumnBatch(K key, Composite name, V value, Optional<Integer> ttlO,
            Mutator<K> mutator)
    {
        if (log.isTraceEnabled())
        {
            log.trace("Insert column {} into column family {} with key {}", format(name),
                    columnFamily, key);
        }

        HColumn<Composite, V> column;
        if (ttlO.isPresent())
        {
            column = HFactory.createColumn(name, value, ttlO.get(), columnNameSerializer,
                    this.<V> valSrz());
        }
        else
        {
            column = HFactory.createColumn(name, value, columnNameSerializer, this.<V> valSrz());
        }
        mutator.addInsertion(key, columnFamily, column);
    }

    public <K, V> V getValue(final K key, final Composite name)
    {
        if (log.isTraceEnabled())
        {
            log.trace("Get value from column family {} with key {} and column name {}",
                    columnFamily, key,
                    format(name));
        }

        V result = null;
        HColumn<Composite, V> column = getColumn(key, name);
        if (column != null)
        {
            result = column.getValue();
        }
        return result;
    }

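    /**
     * Reads a single column by its exact composite name, using the read consistency
     * level configured for this column family.
     */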
    public <K, V> HColumn<Composite, V> getColumn(final K key, final Composite name)
    {
        if (log.isTraceEnabled())
        {
            log.trace("Get column from column family {} with key {} and column name {}",
                    columnFamily, key,
                    format(name));
        }

        this.policy.loadConsistencyLevelForRead(columnFamily);
        return reinitConsistencyLevels(new SafeExecutionContext<HColumn<Composite, V>>()
        {
            @Override
            public HColumn<Composite, V> execute()
            {
                return HFactory
                        .createColumnQuery(keyspace, ThriftAbstractDao.this.<K> rowSrz(),
                                columnNameSerializer,
                                ThriftAbstractDao.this.<V> valSrz())
                        .setColumnFamily(columnFamily)
                        .setKey(key)
                        .setName(name)
                        .execute()
                        .get();
            }
        });
    }

    public <K, V> void setValue(K key, Composite name, V value)
    {
        log.trace("Set value {} to column family {} with key {} , column name {}", value,
                columnFamily, key, name);

        Mutator<K> mutator = HFactory.createMutator(keyspace, this.<K> rowSrz());
        this.setValueBatch(key, name, value, Optional.<Integer> absent(), mutator);
        this.executeMutator(mutator);
    }

    public <K, V> void setValueBatch(K key, Composite name, V value, Optional<Integer> ttlO,
            Mutator<K> mutator)
    {
        if (log.isTraceEnabled())
        {
            log
                    .trace("Set value {} as batch mutation to column family {} with key {} , column name {} and ttl {}",
                            value, columnFamily, key, format(name), ttlO);
        }
        HColumn<Composite, V> column;
        if (ttlO.isPresent())
        {
            column = HFactory.createColumn(name, value, ttlO.get(), columnNameSerializer,
                    this.<V> valSrz());
        }
        else
        {
            column = HFactory.createColumn(name, value, columnNameSerializer, this.<V> valSrz());
        }
        mutator.addInsertion(key, columnFamily, column);
    }

    public <K> void removeColumnBatch(K key, Composite name, Mutator<K> mutator)
    {
        if (log.isTraceEnabled())
        {
            log.trace("Remove column name {} as batch mutation from column family {} with key {} ",
                    format(name),
                    columnFamily, key);
        }
        mutator.addDeletion(key, columnFamily, name, columnNameSerializer);
    }

    public <K> void removeColumnRangeBatch(K key, Composite start, Composite end,
            Mutator<K> mutator)
    {
        if (log.isTraceEnabled())
        {
            log
                    .trace(
                            "Remove column slice within range having inclusive start/end {}/{} column names as batch mutation from column family {} with key {} ",
                            format(start), format(end), columnFamily, key);
        }
        this.removeColumnRangeBatch(key, start, end, false, Integer.MAX_VALUE, mutator);
    }

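    /**
     * Thrift offers no server-side range deletion, so the matching slice is read first
     * and an individual deletion is queued for every column found.
     */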
    public <K, V> void removeColumnRangeBatch(K key, Composite start, Composite end,
            boolean reverse, int count,
            Mutator<K> mutator)
    {
        if (log.isTraceEnabled())
        {
            log
                    .trace(
                            "Remove {} columns slice within range having inclusive start/end {}/{} column names as batch mutation from column family {} with key {} and reserver {}",
                            count, format(start), format(end), columnFamily, key, reverse);
        }
        List<HColumn<Composite, V>> columns = createSliceQuery(keyspace, this.<K> rowSrz(),
                columnNameSerializer,
                this.<V> valSrz())
                .setColumnFamily(columnFamily)
                .setKey(key)
                .setRange(start, end, reverse, count)
                .execute()
                .get()
                .getColumns();

        for (HColumn<Composite, V> column : columns)
        {
            mutator.addDeletion(key, columnFamily, column.getName(), columnNameSerializer);
        }
    }

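    /**
     * Returns only the values of the columns in the [start, end] slice, in slice order.
     */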
    public <K, V> List<V> findValuesRange(final K key, final Composite start, final Composite end,
            final boolean reverse, final int count)
    {
        if (log.isTraceEnabled())
        {
            log
                    .trace(
                            "Find {} values slice within range having inclusive start/end {}/{} column names from column family {} with key {} and reverse {}",
                            count, format(start), format(end), columnFamily, key, reverse);
        }
        this.policy.loadConsistencyLevelForRead(columnFamily);
        List<HColumn<Composite, V>> columns = reinitConsistencyLevels(new SafeExecutionContext<List<HColumn<Composite, V>>>()
        {
            @Override
            public List<HColumn<Composite, V>> execute()
            {
                return createSliceQuery(keyspace, ThriftAbstractDao.this.<K> rowSrz(),
                        columnNameSerializer,
                        ThriftAbstractDao.this.<V> valSrz())
                        .setColumnFamily(columnFamily)
                        .setKey(key)
                        .setRange(start, end, reverse, count)
                        .execute()
                        .get()
                        .getColumns();
            }
        });
        return Lists.transform(columns, this.<V> getHColumnToValueFn());
    }

    public <K, V> List<Pair<Composite, V>> findColumnsRange(final K key, final Composite start,
            final Composite end,
            final boolean reverse, final int count)
    {
        if (log.isTraceEnabled())
        {
            log
                    .trace(
                            "Find {} columns slice within range having inclusive start/end {}/{} column names from column family {} with key {} and reverse {}",
                            count, format(start), format(end), columnFamily, key, reverse);
        }
        this.policy.loadConsistencyLevelForRead(columnFamily);
        List<HColumn<Composite, V>> columns = reinitConsistencyLevels(new SafeExecutionContext<List<HColumn<Composite, V>>>()
        {
            @Override
            public List<HColumn<Composite, V>> execute()
            {
                return createSliceQuery(keyspace, ThriftAbstractDao.this.<K> rowSrz(),
                        columnNameSerializer,
                        ThriftAbstractDao.this.<V> valSrz())
                        .setColumnFamily(columnFamily)
                        .setKey(key)
                        .setRange(start, end, reverse, count)
                        .execute()
                        .get()
                        .getColumns();
            }
        });
        return Lists.transform(columns, this.<V> getHColumnToPairFn());
    }

    public <K, V> List<HColumn<Composite, V>> findRawColumnsRange(final K key,
            final Composite start,
            final Composite end, final int count, final boolean reverse)
    {
        if (log.isTraceEnabled())
        {
            log
                    .trace(
                            "Find raw {} columns slice within range having inclusive start/end {}/{} column names from column family {} with key {} and reverse {}",
                            count, format(start), format(end), columnFamily, key, reverse);
        }

        this.policy.loadConsistencyLevelForRead(columnFamily);
        return reinitConsistencyLevels(new SafeExecutionContext<List<HColumn<Composite, V>>>()
        {
            @Override
            public List<HColumn<Composite, V>> execute()
            {
                List<HColumn<Composite, V>> columns = createSliceQuery(keyspace,
                        ThriftAbstractDao.this.<K> rowSrz(),
                        columnNameSerializer,
                        ThriftAbstractDao.this.<V> valSrz())
                        .setColumnFamily(columnFamily)
                        .setKey(key)
                        .setRange(start, end, reverse, count)
                        .execute()
                        .get()
                        .getColumns();

                return columns;
            }
        });
    }

    public <K> List<HCounterColumn<Composite>> findCounterColumnsRange(final K key,
            final Composite start,
            final Composite end, final int count, final boolean reverse)
    {
        if (log.isTraceEnabled())
        {
            log
                    .trace(
                            "Find {} counter columns slice within range having inclusive start/end {}/{} column names from column family {} with key {} and reverse {}",
                            count, format(start), format(end), columnFamily, key, reverse);
        }

        this.policy.loadConsistencyLevelForRead(columnFamily);
        return reinitConsistencyLevels(new SafeExecutionContext<List<HCounterColumn<Composite>>>()
        {
            @Override
            public List<HCounterColumn<Composite>> execute()
            {
                return HFactory
                        .createCounterSliceQuery(keyspace, ThriftAbstractDao.this.<K> rowSrz(),
                                columnNameSerializer)
                        .setColumnFamily(columnFamily)
                        .setKey(key)
                        .setRange(start, end, reverse, count)
                        .execute()
                        .get()
                        .getColumns();
            }
        });
    }

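    /**
     * Returns a lazy iterator that pages through the column slice in chunks of
     * 'length' columns.
     */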
    public <K, V> ThriftSliceIterator<K, V> getColumnsIterator(K key, Composite start,
            Composite end,
            boolean reverse, int length)
    {
        if (log.isTraceEnabled())
        {
            log
                    .trace(
                            "Get columns slice iterator within range having inclusive start/end {}/{} column names from column family {} with key {} and reverse {} by batch of {} elements",
                            format(start), format(end), columnFamily, key, reverse, length);
        }

        SliceQuery<K, Composite, V> query = createSliceQuery(keyspace,
                ThriftAbstractDao.this.<K> rowSrz(),
                columnNameSerializer, ThriftAbstractDao.this.<V> valSrz()).setColumnFamily(
                columnFamily).setKey(key);

        return new ThriftSliceIterator<K, V>(policy, columnFamily, query, start, end, reverse,
                length);
    }

    public <K> ThriftCounterSliceIterator<K> getCounterColumnsIterator(K key, Composite start,
            Composite end,
            boolean reverse, int length)
    {
        if (log.isTraceEnabled())
        {
            log
                    .trace(
                            "Get counter columns slice iterator within range having inclusive start/end {}/{} column names from column family {} with key {} and reverse {} by batch of {} elements",
                            format(start), format(end), columnFamily, key, reverse, length);
        }

        this.policy.loadConsistencyLevelForRead(columnFamily);
        SliceCounterQuery<K, Composite> query = createCounterSliceQuery(keyspace,
                this.<K> rowSrz(),
                columnNameSerializer).setColumnFamily(columnFamily).setKey(key);

        return new ThriftCounterSliceIterator<K>(policy, columnFamily, query, start, end, reverse,
                length);
    }

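    /**
     * Returns an iterator over join columns, resolving each column value into the
     * joined entity through the given joinEntityDao.
     */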
    public <K, V, KEY, VALUE> ThriftJoinSliceIterator<K, V, KEY, VALUE> getJoinColumnsIterator(
            ThriftGenericEntityDao joinEntityDao, PropertyMeta<KEY, VALUE> propertyMeta, K key,
            Composite start,
            Composite end, boolean reversed, int count)
    {
        if (log.isTraceEnabled())
        {
            log
                    .trace(
                            "Get join columns iterator within range having inclusive start/end {}/{} column names from column family {} with key {} and reverse {} by batch of {} elements; for property {}",
                            format(start), format(end), columnFamily, key, reversed, count,
                            propertyMeta.getPropertyName());
        }

        SliceQuery<K, Composite, V> query = createSliceQuery(keyspace, this.<K> rowSrz(),
                columnNameSerializer,
                this.<V> valSrz()).setColumnFamily(columnFamily).setKey(key);

        return new ThriftJoinSliceIterator<K, V, KEY, VALUE>(policy, joinEntityDao, columnFamily,
                propertyMeta, query,
                start, end, reversed, count);
    }

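    /**
     * Multiget: fetches the same column slice for several row keys in a single query.
     */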
    public <K, V> Rows<K, Composite, V> multiGetSliceRange(final List<K> keys,
            final Composite start,
            final Composite end, final boolean reverse, final int size)
    {
        if (log.isTraceEnabled())
        {
            log
                    .trace(
                            "Multi get columns slice within range having inclusive start/end {}/{} column names from column family {} with key {} and reverse {} by batch of {} elements; for property {}",
                            format(start), format(end), columnFamily, StringUtils.join(keys, ","),
                            reverse, size);
        }

        this.policy.loadConsistencyLevelForRead(columnFamily);
        return reinitConsistencyLevels(new SafeExecutionContext<Rows<K, Composite, V>>()
        {
            @Override
            public Rows<K, Composite, V> execute()
            {
                return HFactory
                        .createMultigetSliceQuery(keyspace, ThriftAbstractDao.this.<K> rowSrz(),
                                columnNameSerializer, ThriftAbstractDao.this.<V> valSrz())
                        .setColumnFamily(columnFamily)
                        .setKeys(keys)
                        .setRange(start, end, reverse, size)
                        .execute()
                        .get();
            }
        });
    }

    public <K> void removeRowBatch(K key, Mutator<K> mutator)
    {
        log.trace("Remove row as batch mutation from column family {} with key {}", columnFamily,
                key);

        mutator.addDeletion(key, columnFamily);
    }

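    /**
     * Increments a counter column by the given amount and flushes the mutation
     * immediately.
     */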
    public <K> void incrementCounter(K key, Composite name, Long value)
    {
        if (log.isTraceEnabled())
        {
            log.trace("Incrementing counter column {} with key {} from column family {} by {}",
                    format(name), key,
                    columnFamily, value);
        }
        Mutator<K> mutator = buildMutator();
        mutator.addCounter(key, columnFamily, new HCounterColumnImpl<Composite>(name, value,
                ThriftSerializerUtils.COMPOSITE_SRZ));
        executeMutator(mutator);
    }

    public <K> void decrementCounter(K key, Composite name, Long value)
    {
        if (log.isTraceEnabled())
        {
            log.trace("Decrementing counter column {} with key {} from column family {} by {}",
                    format(name), key,
                    columnFamily, value);
        }
        Mutator<K> mutator = buildMutator();
        mutator.addCounter(key, columnFamily, new HCounterColumnImpl<Composite>(name, value * -1L,
                ThriftSerializerUtils.COMPOSITE_SRZ));
        executeMutator(mutator);
    }

    public <K> long getCounterValue(K key, Composite name)
    {
        if (log.isTraceEnabled())
        {
            log.trace("Get counter value column {} with key {} from column family {}",
                    format(name), key,
                    columnFamily);
        }

        long counterValue = 0;
        HCounterColumn<Composite> counterColumn = getCounterColumn(key, name);
        if (counterColumn != null)
        {
            counterValue = counterColumn.getValue();
        }

        return counterValue;
    }

    public <K> HCounterColumn<Composite> getCounterColumn(K key, Composite name)
    {
        if (log.isTraceEnabled())
        {
            log.trace("Get counter  column {} with key {} from column family {}", format(name),
                    key, columnFamily);
        }

        final CounterQuery<K, Composite> counter = new ThriftCounterColumnQuery<K, Composite>(
                keyspace,
                this.<K> rowSrz(), columnNameSerializer)
                .setColumnFamily(columnFamily)
                .setKey(key)
                .setName(name);

        this.policy.loadConsistencyLevelForRead(columnFamily);
        return reinitConsistencyLevels(new SafeExecutionContext<HCounterColumn<Composite>>()
        {
            @Override
            public HCounterColumn<Composite> execute()
            {
                return counter.execute().get();
            }
        });
    }

    public <K> void removeCounterBatch(K key, Composite name, Mutator<K> mutator)
    {
        if (log.isTraceEnabled())
        {
            log.trace(
                    "Remove counter column {} as batch mutation with key {} from column family {}",
                    format(name),
                    key, columnFamily);
        }

        mutator.deleteCounter(key, columnFamily, name, columnNameSerializer);
    }

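    /**
     * Removes an entire counter row by iterating over its counter columns in pages of
     * DEFAULT_LENGTH and deleting them one by one.
     */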
    public <K> void removeCounterRowBatch(K key, Mutator<K> mutator)
    {
        log.trace("Remove counter row as batch mutation with key {} from column family {}", key,
                columnFamily);

        SliceCounterQuery<K, Composite> query = HFactory
                .createCounterSliceQuery(keyspace, this.<K> rowSrz(), columnNameSerializer)
                .setColumnFamily(columnFamily).setKey(key);

        ThriftCounterSliceIterator<K> iterator = new ThriftCounterSliceIterator<K>(policy,
                columnFamily, query,
                (Composite) null, (Composite) null, false, DEFAULT_LENGTH);

        while (iterator.hasNext())
        {
            HCounterColumn<Composite> counterCol = iterator.next();
            mutator.deleteCounter(key, columnFamily, counterCol.getName(), columnNameSerializer);
        }
    }

    public void truncate()
    {
        cluster.truncate(keyspace.getKeyspaceName(), columnFamily);
    }

    public void truncateCounters()
    {
        cluster.truncate(keyspace.getKeyspaceName(), AchillesCounter.THRIFT_COUNTER_CF);
    }

    public <K> Mutator<K> buildMutator()
    {
        return HFactory.createMutator(this.keyspace, this.<K> rowSrz());
    }

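    /**
     * Flushes all pending mutations with the write consistency level configured for
     * this column family, restoring the defaults afterwards.
     */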
    public <K> void executeMutator(final Mutator<K> mutator)
    {
        log.trace("Execute safely mutator with {} mutations for column family {}",
                mutator.getPendingMutationCount(),
                columnFamily);

        this.policy.loadConsistencyLevelForWrite(this.columnFamily);
        reinitConsistencyLevels(new SafeExecutionContext<Void>()
        {
            @Override
            public Void execute()
            {
                mutator.execute();
                return null;
            }
        });
    }

    public String getColumnFamily()
    {
        return columnFamily;
    }

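    /**
     * Row key and value serializers are inferred at runtime from the classes held in
     * rowkeyAndValueClasses.
     */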
    protected <K> Serializer<K> rowSrz()
    {
        return ThriftSerializerTypeInferer.<K> getSerializer((Class<K>) rowkeyAndValueClasses.left);
    }

    protected <V> Serializer<V> valSrz()
    {
        return ThriftSerializerTypeInferer
                .<V> getSerializer((Class<V>) rowkeyAndValueClasses.right);
    }
}