/*
 * Copyright 2013 eBuddy B.V.
 *
 *    Licensed under the Apache License, Version 2.0 (the "License");
 *    you may not use this file except in compliance with the License.
 *    You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 *    Unless required by applicable law or agreed to in writing, software
 *    distributed under the License is distributed on an "AS IS" BASIS,
 *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *    See the License for the specific language governing permissions and
 *    limitations under the License.
 */

package com.ebuddy.cassandra.dao;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.ebuddy.cassandra.BatchContext;
import com.ebuddy.cassandra.dao.mapper.ColumnFamilyRowMapper;
import com.ebuddy.cassandra.dao.mapper.ColumnMapper;

import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.Serializer;
import me.prettyprint.hector.api.beans.ColumnSlice;
import me.prettyprint.hector.api.beans.HColumn;
import me.prettyprint.hector.api.beans.OrderedRows;
import me.prettyprint.hector.api.beans.Row;
import me.prettyprint.hector.api.beans.Rows;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.mutation.Mutator;
import me.prettyprint.hector.api.query.ColumnQuery;
import me.prettyprint.hector.api.query.MultigetSliceQuery;
import me.prettyprint.hector.api.query.QueryResult;
import me.prettyprint.hector.api.query.RangeSlicesQuery;
import me.prettyprint.hector.api.query.SliceQuery;

/**
 * A data access template for accessing a (regular) Column Family.
 *
 * @param <K> the type of the row keys
 * @param <N> the type of the column names
 * @param <V> the type of the column values
 * @author Eric Zoerner
 */
public class ColumnFamilyTemplate<K, N, V> extends AbstractColumnFamilyTemplate<K, N, V>
        implements ColumnFamilyOperations<K, N, V> {
    private static final Logger LOG = LoggerFactory.getLogger(ColumnFamilyTemplate.class);

    private final ColumnMapper<N, N, V> columnMapperToGetColumnNames = new ColumnMapper<N, N, V>() {
        @Override
        public N mapColumn(N columnName, V columnValue) {
            return columnName;
        }
    };

    public ColumnFamilyTemplate(Keyspace keyspace,
                                Serializer<K> keySerializer,
                                Serializer<N> columnNameSerializer,
                                Serializer<V> valueSerializer) {
        this(keyspace, null, keySerializer, columnNameSerializer, valueSerializer);
    }

    public ColumnFamilyTemplate(Keyspace keyspace,
                                @Nullable String defaultColumnFamily,
                                Serializer<K> keySerializer,
                                Serializer<N> columnNameSerializer,
                                Serializer<V> valueSerializer) {
        super(keyspace,
              defaultColumnFamily,
              keySerializer,
              Validate.notNull(columnNameSerializer),
              Validate.notNull(valueSerializer));
    }
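
    /*
     * A minimal construction sketch. The cluster address, keyspace name, and column
     * family name below are hypothetical; StringSerializer is Hector's
     * me.prettyprint.cassandra.serializers.StringSerializer:
     *
     *   Cluster cluster = HFactory.getOrCreateCluster("test-cluster", "localhost:9160");
     *   Keyspace keyspace = HFactory.createKeyspace("MyKeyspace", cluster);
     *   ColumnFamilyTemplate<String, String, String> template =
     *           new ColumnFamilyTemplate<String, String, String>(keyspace,
     *                                                            "MyColumnFamily",
     *                                                            StringSerializer.get(),
     *                                                            StringSerializer.get(),
     *                                                            StringSerializer.get());
     */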

    /**
     * Read a column value.
     *
     * @param rowKey     the row key of type K
     * @param columnName the column name
     * @return the column value or null if not found
     */
    @Override
    public V readColumnValue(K rowKey, N columnName) {
        ColumnQuery<K, N, V> query = HFactory.createColumnQuery(getKeyspace(),
                                                                getKeySerializer(),
                                                                getColumnNameSerializer(),
                                                                getValueSerializer());
        QueryResult<HColumn<N, V>> result = query.
                setKey(rowKey).
                setColumnFamily(getColumnFamily()).
                setName(columnName).
                execute();
        HColumn<N, V> column = result.get();
        return column != null ? column.getValue() : null;
        // we used to translate hector exceptions into spring exceptions here, but spring dependency was removed
    }
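
    /*
     * Usage sketch for the single-column read above (the row key "user42" and
     * column "email" are hypothetical; template as constructed earlier):
     *
     *   String email = template.readColumnValue("user42", "email");
     *   if (email == null) {
     *       // the row or the column does not exist
     *   }
     */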

    /**
     * Read the columns as a map from a single row.
     *
     * @param rowKey the row key of type K
     * @return a map of columns; keys are of type N and values of type V
     */
    @Override
    public Map<N, V> readColumnsAsMap(K rowKey) {
        return readColumnsAsMap(rowKey, null, null, ALL, false);
    }

    /**
     * Read the columns as a map from a single row, specifying start, finish, count, and reversed.
     *
     * @param rowKey   the row key of type K
     * @param start    the starting column name, or null to start at the beginning of the row
     * @param finish   the ending column name, or null to read to the end of the row
     * @param count    the maximum number of columns to return
     * @param reversed if true, read the columns in reverse comparator order
     * @return a map of columns; keys are of type N and values of type V
     */
    @Override
    public Map<N, V> readColumnsAsMap(K rowKey, N start, N finish, int count, boolean reversed) {
        Map<N, V> columns = new HashMap<N, V>();
        SliceQuery<K, N, V> query = HFactory.createSliceQuery(getKeyspace(),
                                                              getKeySerializer(),
                                                              getColumnNameSerializer(),
                                                              getValueSerializer());
        QueryResult<ColumnSlice<N, V>> result = query.setKey(rowKey).
                setColumnFamily(getColumnFamily()).
                setRange(start, finish, reversed, count).
                execute();
        ColumnSlice<N, V> slice = result.get();

        for (HColumn<N, V> column : slice.getColumns()) {
            columns.put(column.getName(), column.getValue());
        }
        // we used to translate hector exceptions into spring exceptions here, but spring dependency was removed
        return columns;
    }
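
    /*
     * Slice semantics sketch (hypothetical names): passing null for start and
     * finish selects from the beginning and end of the row, so the call below
     * returns at most the first 100 columns of the row in comparator order:
     *
     *   Map<String, String> first100 = template.readColumnsAsMap("user42", null, null, 100, false);
     *
     * With reversed = true, the slice is taken in reverse comparator order, so
     * start and finish are interpreted accordingly.
     */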

    @Override
    public <T> List<T> readColumns(K rowKey, ColumnMapper<T, N, V> columnMapper) {
        return readColumns(rowKey, null, null, ALL, false, columnMapper);
    }

    @Override
    public <T> List<T> readColumns(K rowKey,
                                   N start,
                                   N finish,
                                   int count,
                                   boolean reversed,
                                   ColumnMapper<T, N, V> columnMapper) {
        List<T> resultList = new ArrayList<T>();

        SliceQuery<K, N, V> query = HFactory.createSliceQuery(getKeyspace(),
                                                              getKeySerializer(),
                                                              getColumnNameSerializer(),
                                                              getValueSerializer());
        QueryResult<ColumnSlice<N, V>> result = query.setKey(rowKey).
                setColumnFamily(getColumnFamily()).
                setRange(start, finish, reversed, count).
                execute();
        ColumnSlice<N, V> slice = result.get();

        for (HColumn<N, V> column : slice.getColumns()) {
            resultList.add(columnMapper.mapColumn(column.getName(), column.getValue()));
        }
        // we used to translate hector exceptions into spring exceptions here, but spring dependency was removed
        return resultList;
    }
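
    /*
     * A minimal ColumnMapper sketch, assuming the <T, N, V> parameter order used in
     * this package and String columns (the names below are illustrative only):
     *
     *   List<String> pairs = template.readColumns("user42", new ColumnMapper<String, String, String>() {
     *       @Override
     *       public String mapColumn(String columnName, String columnValue) {
     *           return columnName + "=" + columnValue;
     *       }
     *   });
     */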

    /**
     * Read all columns from multiple rows.
     *
     * @param rowKeys a collection of rows to read
     */
    @Override
    public Map<K, Map<N, V>> multiGetAsMap(Iterable<K> rowKeys) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Calling multiGetAsMap with rowKeys: " + rowKeys);
        }
        return basicMultiGetAsMap(rowKeys, null);
    }

    /**
     * Read specific columns from multiple rows.
     *
     * @param rowKeys     a collection of rows to read
     * @param columnNames names of the columns; if no columns are passed in then
     *                    just the keys are returned with no column data
     */
    @Override
    public Map<K, Map<N, V>> multiGetColumnsAsMap(Iterable<K> rowKeys, N... columnNames) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Calling multiGetColumnsAsMap with rowKeys: " + rowKeys
                              + " and columnNames: " + Arrays.asList(columnNames));
        }
        return basicMultiGetAsMap(rowKeys, columnNames);
    }
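
    /*
     * Multiget sketch (hypothetical keys and columns). Per the javadoc above,
     * passing no column names makes this a key-only query, so each key maps to
     * an empty column map:
     *
     *   Map<String, Map<String, String>> byKey =
     *           template.multiGetColumnsAsMap(Arrays.asList("user42", "user43"), "email", "name");
     */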

    @Override
    public Map<K, Map<N, V>> readRowsAsMap() {
        Map<K, Map<N, V>> resultMap = new HashMap<K, Map<N, V>>();
        RangeSlicesQuery<K, N, V> rangeSlicesQuery = HFactory.createRangeSlicesQuery(getKeyspace(),
                                                                                     getKeySerializer(),
                                                                                     getColumnNameSerializer(),
                                                                                     getValueSerializer());
        rangeSlicesQuery.setColumnFamily(getColumnFamily());
        rangeSlicesQuery.setRange(null, null, false, ALL);
        rangeSlicesQuery.setRowCount(ALL);
        QueryResult<OrderedRows<K, N, V>> result = rangeSlicesQuery.execute();
        for (Row<K, N, V> row : result.get()) {
            K key = row.getKey();
            ColumnSlice<N, V> slice = row.getColumnSlice();
            Map<N, V> columns = new HashMap<N, V>();
            for (HColumn<N, V> column : slice.getColumns()) {
                V value = column.getValue();
                columns.put(column.getName(), value);
            }
            resultMap.put(key, columns);
        }

        // we used to translate hector exceptions into spring exceptions here, but spring dependency was removed

        if (LOG.isDebugEnabled()) {
            LOG.debug("Returning result from multiGetColumnsAsMap: " + resultMap);
        }
        return resultMap;
    }

    /**
     * Read all columns from multiple rows using a mapper for the result.
     *
     * @param rowKeys a collection of rows to read
     */
    @Override
    public <T> List<T> multiGet(Iterable<K> rowKeys, ColumnFamilyRowMapper<T, K, N, V> rowMapper) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Calling multiGet with rowKeys: " + rowKeys);
        }
        return basicMultiGet(rowKeys, rowMapper, null);
    }
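
    /*
     * A minimal ColumnFamilyRowMapper sketch, assuming the <T, K, N, V> parameter
     * order used in this package; rowKeys is any Iterable<String> and the names
     * are illustrative only:
     *
     *   List<String> summaries = template.multiGet(rowKeys,
     *           new ColumnFamilyRowMapper<String, String, String, String>() {
     *       @Override
     *       public String mapRow(String rowKey, List<HColumn<String, String>> columns) {
     *           return rowKey + " has " + columns.size() + " columns";
     *       }
     *   });
     */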

    /**
     * Read specific columns from multiple rows using a mapper for the result.
     *
     * @param rowKeys     a collection of rows to read
     * @param columnNames names of the columns; if no columns are passed in then
     *                    just the keys are returned with no column data
     */
    @Override
    public <T> List<T> multiGetColumns(Iterable<K> rowKeys, ColumnFamilyRowMapper<T, K, N, V> rowMapper,
                                       N... columnNames) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Calling multiGetColumns with rowKeys: " + rowKeys
                              + " and columnNames: " + Arrays.asList(columnNames));
        }
        return basicMultiGet(rowKeys, rowMapper, columnNames);
    }

    /**
     * Write a column value immediately.
     *
     * @param rowKey      the row key of type K
     * @param columnName  the column name
     * @param columnValue the column value
     */
    @Override
    public void writeColumn(K rowKey, N columnName, V columnValue) {
        basicWriteColumn(rowKey, columnName, columnValue, 0, null, null);
    }

    /**
     * Write a column value immediately with a time to live.
     *
     * @param rowKey             the row key of type K
     * @param columnName         the column name
     * @param columnValue        the column value
     * @param timeToLive         a positive time to live
     * @param timeToLiveTimeUnit the time unit for timeToLive
     */
    @Override
    public void writeColumn(K rowKey, N columnName, V columnValue, long timeToLive, TimeUnit timeToLiveTimeUnit) {
        basicWriteColumn(rowKey, columnName, columnValue, timeToLive, timeToLiveTimeUnit, null);
    }
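
    /*
     * TTL write sketch (hypothetical names): the column below expires roughly one
     * day after the write; the time to live is converted to whole seconds internally:
     *
     *   template.writeColumn("user42", "sessionToken", "abc123", 1L, TimeUnit.DAYS);
     */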


    /**
     * Write a column value as part of a batch operation.
     *
     * @param rowKey       the row key of type K
     * @param columnName   the column name
     * @param columnValue  the column value
     * @param batchContext the BatchContext for the batch operation
     */
    @Override
    public void writeColumn(K rowKey, N columnName, V columnValue, @Nonnull BatchContext batchContext) {
        Validate.notNull(batchContext);
        basicWriteColumn(rowKey, columnName, columnValue, 0, null, batchContext);
    }

    /**
     * Write a column value as part of a batch operation with a time to live.
     *
     * @param rowKey             the row key of type K
     * @param columnName         the column name
     * @param columnValue        the column value
     * @param timeToLive         a positive time to live
     * @param timeToLiveTimeUnit the time unit for timeToLive
     * @param batchContext       the BatchContext for the batch operation
     */
    @Override
    public void writeColumn(K rowKey,
                            N columnName,
                            V columnValue,
                            int timeToLive,
                            TimeUnit timeToLiveTimeUnit,
                            @Nonnull BatchContext batchContext) {
        Validate.notNull(batchContext);
        basicWriteColumn(rowKey, columnName, columnValue, timeToLive, timeToLiveTimeUnit, batchContext);
    }
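
    /*
     * Batch sketch: a BatchContext groups mutations so they are sent in a single
     * execute. How a context is obtained and committed depends on the surrounding
     * API; begin() and commit() below are hypothetical method names used only to
     * illustrate the flow:
     *
     *   BatchContext batch = template.begin();            // hypothetical
     *   template.writeColumn("user42", "a", "1", batch);
     *   template.writeColumn("user42", "b", "2", batch);
     *   template.commit(batch);                           // hypothetical
     */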

    /**
     * Write multiple columns immediately from a map.
     */
    @Override
    public void writeColumns(K rowKey, Map<N, V> map) {
        insertColumns(rowKey, map);
        // we used to translate hector exceptions into spring exceptions here, but spring dependency was removed
    }

    /**
     * Write multiple columns from a map as part of a batch operation.
     *
     * @param rowKey     the row key of type K
     * @param map          a map of columns with keys of column name type N and values of type V
     * @param batchContext the BatchContext for the batch operation
     */
    @Override
    public void writeColumns(K rowKey, Map<N, V> map, @Nonnull BatchContext batchContext) {
        Validate.notNull(batchContext);
        Mutator<K> mutator = validateAndGetMutator(batchContext);
        addInsertions(rowKey, map, mutator);
        // we used to translate hector exceptions into spring exceptions here, but spring dependency was removed
    }

    @Override
    public void deleteColumns(K rowKey, N... columnNames) {
        if (columnNames.length == 0) {
            return;
        }

        Mutator<K> mutator = createMutator();
        if (columnNames.length == 1) {
            mutator.delete(rowKey, getColumnFamily(), columnNames[0], getColumnNameSerializer());
        } else {
            for (N columnName : columnNames) {
                mutator.addDeletion(rowKey, getColumnFamily(), columnName, getColumnNameSerializer());
            }
            mutator.execute();
        }
        // we used to translate hector exceptions into spring exceptions here, but spring dependency was removed
    }

    @Override
    public void deleteColumns(K rowKey, N start, N finish) {
        deleteColumns(rowKey, start, finish, null);
    }

    @Override
    public void deleteColumns(K rowKey, N start, N finish, @Nullable BatchContext batchContext) {
        Mutator<K> mutator;
        boolean shouldExecute;
        if (batchContext == null) {
            shouldExecute = true;
            mutator = createMutator();
        } else {
            shouldExecute = false;
            mutator = validateAndGetMutator(batchContext);
        }

        // Unfortunately the Thrift API to Cassandra does not support deleting with a SliceRange,
        // so we first read the column names to delete using a slice query.
        // !! This is a read-before-delete: a performance cost and a potential race with concurrent writers.
        List<N> columnNamesToDelete = readColumns(rowKey,
                                                  start,
                                                  finish,
                                                  ALL,
                                                  false,
                                                  columnMapperToGetColumnNames);
        for (N columnName : columnNamesToDelete) {
            mutator.addDeletion(rowKey, getColumnFamily(), columnName, getColumnNameSerializer());
        }

        if (shouldExecute) {
            mutator.execute();
        }
        // we used to translate hector exceptions into spring exceptions here, but spring dependency was removed
    }

    /**
     * Helper method to get multiple rows and return result in a Map.
     * @param rowKeys     The row keys to read.
     * @param columnNames if null then get all columns; otherwise, get only specified columns. If empty, then this
     *                    is a key-only query and only the keys are returned.
     * @return The column data
     */
    private Map<K, Map<N, V>> basicMultiGetAsMap(Iterable<K> rowKeys, @Nullable N[] columnNames) {

        Map<K, Map<N, V>> resultMap = new HashMap<K, Map<N, V>>();
        MultigetSliceQuery<K, N, V> query = HFactory.createMultigetSliceQuery(getKeyspace(),
                                                                              getKeySerializer(),
                                                                              getColumnNameSerializer(),
                                                                              getValueSerializer());
        query.setKeys(rowKeys).
                setColumnFamily(getColumnFamily()).
                setRange(null, null, false, ALL);
        if (columnNames != null) {
            query.setColumnNames(columnNames);
        }
        QueryResult<Rows<K, N, V>> result = query.execute();

        for (Row<K, N, V> row : result.get()) {
            K key = row.getKey();
            ColumnSlice<N, V> slice = row.getColumnSlice();
            Map<N, V> columns = new HashMap<N, V>();
            for (HColumn<N, V> column : slice.getColumns()) {
                V value = column.getValue();
                columns.put(column.getName(), value);
            }
            resultMap.put(key, columns);
        }
        // we used to translate hector exceptions into spring exceptions here, but spring dependency was removed
        if (LOG.isDebugEnabled()) {
            LOG.debug("Returning result from multiGetColumnsAsMap: " + resultMap);
        }
        return resultMap;
    }

    /**
     * Helper method to get multiple rows using a row mapper.
     * @param rowKeys     The row keys to read.
     * @param columnNames if null then get all columns; otherwise, get only specified columns. If empty, then this
     *                    is a key-only query and only the keys are returned.
     * @return The column data
     */
    private <T> List<T> basicMultiGet(Iterable<K> rowKeys,
                                      ColumnFamilyRowMapper<T, K, N, V> rowMapper,
                                      @Nullable N[] columnNames) {

        List<T> resultList = new ArrayList<T>();

        MultigetSliceQuery<K, N, V> query = HFactory.createMultigetSliceQuery(getKeyspace(),
                                                                              getKeySerializer(),
                                                                              getColumnNameSerializer(),
                                                                              getValueSerializer());
        query.setKeys(rowKeys).
                setColumnFamily(getColumnFamily()).
                setRange(null, null, false, ALL);
        if (columnNames != null) {
            query.setColumnNames(columnNames);
        }
        QueryResult<Rows<K, N, V>> result = query.execute();

        for (Row<K, N, V> row : result.get()) {
            K key = row.getKey();
            ColumnSlice<N, V> slice = row.getColumnSlice();
            List<HColumn<N, V>> columns = new ArrayList<HColumn<N, V>>();
            for (HColumn<N, V> column : slice.getColumns()) {
                columns.add(column);
            }
            resultList.add(rowMapper.mapRow(key, columns));
        }
        // we used to translate hector exceptions into spring exceptions here, but spring dependency was removed
        if (LOG.isDebugEnabled()) {
            LOG.debug("Returning result from multiGetColumnsAsMap: " + resultList);
        }
        return resultList;
    }

    /**
     * Write a column value, with option of doing batch operation if batchContext is provided.
     *
     * @param rowKey             the row key of type K
     * @param columnName         the column name
     * @param columnValue        the column value
     * @param timeToLive         a positive time to live, in units of timeToLiveTimeUnit;
     *                           ignored if timeToLiveTimeUnit is null
     * @param timeToLiveTimeUnit the time unit for timeToLive, or null if no time to live is specified
     * @param batchContext       optional BatchContext for a batch operation
     */
    private void basicWriteColumn(K rowKey,
                                  N columnName,
                                  V columnValue,
                                  long timeToLive,
                                  @Nullable TimeUnit timeToLiveTimeUnit,
                                  @Nullable BatchContext batchContext) {
        Mutator<K> mutator = validateAndGetMutator(batchContext);
        HColumn<N, V> column;

        if (timeToLiveTimeUnit == null) {
            column = createColumn(columnName, columnValue);
        } else {
            long timeToLiveInSeconds = TimeUnit.SECONDS.convert(timeToLive, timeToLiveTimeUnit);
            Validate.isTrue(timeToLiveInSeconds <= Integer.MAX_VALUE && timeToLiveInSeconds > 0,
                            "Invalid time to live, must be positive and " +
                                    "fit in an int when converted to seconds: " + timeToLiveInSeconds);
            column = HFactory.createColumn(columnName,
                                           columnValue,
                                           (int)timeToLiveInSeconds,
                                           getColumnNameSerializer(),
                                           getValueSerializer());
        }
        }

        if (mutator == null) {
            createMutator().insert(rowKey, getColumnFamily(), column);
        } else {
            mutator.addInsertion(rowKey, getColumnFamily(), column);
        }
        // we used to translate hector exceptions into spring exceptions here, but spring dependency was removed
    }
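
    /*
     * Worked conversion example for the validation above: a time to live of
     * 2 days is TimeUnit.SECONDS.convert(2, TimeUnit.DAYS) = 172800 seconds,
     * which is positive and fits in an int, so it is accepted.
     */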

    private void addInsertions(K rowKey, Map<N, V> properties, Mutator<K> mutator) {
        for (Map.Entry<N, V> mapEntry : properties.entrySet()) {
            mutator.addInsertion(rowKey, getColumnFamily(), createColumn(mapEntry.getKey(), mapEntry.getValue()));
        }
    }

    private void insertColumns(K rowKey, Map<N, V> properties) {
        Mutator<K> mutator = createMutator();
        for (Map.Entry<N, V> mapEntry : properties.entrySet()) {
            N key = mapEntry.getKey();
            V value = mapEntry.getValue();
            mutator.addInsertion(rowKey, getColumnFamily(), createColumn(key, value));
        }
        mutator.execute();
    }

    private HColumn<N, V> createColumn(N key, V value) {
        return HFactory.createColumn(key, value, getColumnNameSerializer(), getValueSerializer());
    }

    private Serializer<N> getColumnNameSerializer() {
        return getTopSerializer();
    }
}