org.usergrid.persistence.cassandra.CassandraService Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of usergrid-core Show documentation
Core services for Usergrid system.
/*******************************************************************************
* Copyright 2012 Apigee Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.usergrid.persistence.cassandra;
import static me.prettyprint.cassandra.service.FailoverPolicy.ON_FAIL_TRY_ALL_AVAILABLE;
import static me.prettyprint.hector.api.factory.HFactory.createColumn;
import static me.prettyprint.hector.api.factory.HFactory.createMultigetSliceQuery;
import static me.prettyprint.hector.api.factory.HFactory.createMutator;
import static me.prettyprint.hector.api.factory.HFactory.createRangeSlicesQuery;
import static me.prettyprint.hector.api.factory.HFactory.createSliceQuery;
import static me.prettyprint.hector.api.factory.HFactory.createVirtualKeyspace;
import static org.apache.commons.collections.MapUtils.getIntValue;
import static org.apache.commons.collections.MapUtils.getString;
import static org.usergrid.persistence.cassandra.ApplicationCF.ENTITY_ID_SETS;
import static org.usergrid.persistence.cassandra.CassandraPersistenceUtils.batchExecute;
import static org.usergrid.persistence.cassandra.CassandraPersistenceUtils.buildSetIdListMutator;
import static org.usergrid.utils.ConversionUtils.bytebuffer;
import static org.usergrid.utils.ConversionUtils.bytebuffers;
import static org.usergrid.utils.ConversionUtils.string;
import static org.usergrid.utils.ConversionUtils.uuid;
import static org.usergrid.utils.JsonUtils.mapToFormattedJsonString;
import static org.usergrid.utils.MapUtils.asMap;
import static org.usergrid.utils.MapUtils.filter;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import me.prettyprint.cassandra.connection.HConnectionManager;
import me.prettyprint.cassandra.model.ConfigurableConsistencyLevel;
import me.prettyprint.cassandra.serializers.ByteBufferSerializer;
import me.prettyprint.cassandra.serializers.BytesArraySerializer;
import me.prettyprint.cassandra.serializers.DynamicCompositeSerializer;
import me.prettyprint.cassandra.serializers.LongSerializer;
import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.cassandra.serializers.UUIDSerializer;
import me.prettyprint.cassandra.service.CassandraHostConfigurator;
import me.prettyprint.cassandra.service.ThriftKsDef;
import me.prettyprint.hector.api.Cluster;
import me.prettyprint.hector.api.ConsistencyLevelPolicy;
import me.prettyprint.hector.api.HConsistencyLevel;
import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.Serializer;
import me.prettyprint.hector.api.beans.ColumnSlice;
import me.prettyprint.hector.api.beans.DynamicComposite;
import me.prettyprint.hector.api.beans.HColumn;
import me.prettyprint.hector.api.beans.OrderedRows;
import me.prettyprint.hector.api.beans.Row;
import me.prettyprint.hector.api.beans.Rows;
import me.prettyprint.hector.api.ddl.ColumnDefinition;
import me.prettyprint.hector.api.ddl.ColumnFamilyDefinition;
import me.prettyprint.hector.api.ddl.KeyspaceDefinition;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.mutation.Mutator;
import me.prettyprint.hector.api.query.ColumnQuery;
import me.prettyprint.hector.api.query.CountQuery;
import me.prettyprint.hector.api.query.MultigetSliceQuery;
import me.prettyprint.hector.api.query.QueryResult;
import me.prettyprint.hector.api.query.RangeSlicesQuery;
import me.prettyprint.hector.api.query.SliceQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.usergrid.locking.LockManager;
import org.usergrid.persistence.IndexBucketLocator;
import org.usergrid.persistence.IndexBucketLocator.IndexType;
import org.usergrid.utils.JsonUtils;
public class CassandraService {
/** Name of the keyspace that stores system-wide Usergrid metadata. */
public static String SYSTEM_KEYSPACE = "Usergrid";
/** Single physical keyspace shared by every application when virtual keyspaces are enabled. */
public static String STATIC_APPLICATION_KEYSPACE = "Usergrid_Applications";
/** When true, all applications share STATIC_APPLICATION_KEYSPACE via row-key prefixing. */
public static final boolean USE_VIRTUAL_KEYSPACES = true;

// Column-family names in the system keyspace.
public static final String APPLICATIONS_CF = "Applications";
public static final String PROPERTIES_CF = "Properties";
public static final String TOKENS_CF = "Tokens";
public static final String PRINCIPAL_TOKEN_CF = "PrincipalTokens";

// Paging and retry tuning constants.
public static final int DEFAULT_COUNT = 1000;
public static final int ALL_COUNT = 100000;
public static final int INDEX_ENTRY_LIST_COUNT = 1000;
public static final int DEFAULT_SEARCH_COUNT = 10000;
public static final int RETRY_COUNT = 5;

// Well-known application / organization identifiers.
public static final String DEFAULT_APPLICATION = "default-app";
public static final String DEFAULT_ORGANIZATION = "usergrid";
public static final String MANAGEMENT_APPLICATION = "management";
public static final UUID MANAGEMENT_APPLICATION_ID = new UUID(0, 1);
public static final UUID DEFAULT_APPLICATION_ID = new UUID(0, 16);

private static final Logger logger = LoggerFactory
        .getLogger(CassandraService.class);
// Separate logger for low-level DB traffic so it can be tuned independently.
private static final Logger db_logger = LoggerFactory
        .getLogger(CassandraService.class.getPackage().getName() + ".DB");

Cluster cluster;
CassandraHostConfigurator chc;
Properties properties;
LockManager lockManager;
ConsistencyLevelPolicy consistencyLevelPolicy;
private Keyspace systemKeyspace;
// Hector credentials ("username"/"password") handed to the keyspace factories.
// Generic type restored — the scraped source had a raw Map here.
private Map<String, String> accessMap;

// Shared, stateless Hector serializers.
public static final StringSerializer se = new StringSerializer();
public static final ByteBufferSerializer be = new ByteBufferSerializer();
public static final UUIDSerializer ue = new UUIDSerializer();
public static final BytesArraySerializer bae = new BytesArraySerializer();
public static final DynamicCompositeSerializer dce = new DynamicCompositeSerializer();
public static final LongSerializer le = new LongSerializer();

/** Sentinel UUID (all zero bits) used to mean "no id". */
public static final UUID NULL_ID = new UUID(0, 0);
/**
 * Creates the service with its collaborators; call {@link #init()} before use.
 *
 * @param properties configuration properties (cassandra.* keys are read in init/createKeyspace)
 * @param cluster Hector cluster handle
 * @param cassandraHostConfigurator host configuration; also supplies the clock for createTimestamp()
 * @param lockManager distributed lock manager exposed via getLockManager()
 */
public CassandraService(Properties properties, Cluster cluster,
CassandraHostConfigurator cassandraHostConfigurator,
LockManager lockManager) {
this.properties = properties;
this.cluster = cluster;
chc = cassandraHostConfigurator;
this.lockManager = lockManager;
// Log the known hosts at construction time for diagnostics.
db_logger.info("" + cluster.getKnownPoolHosts(false));
}
/**
 * Initializes the service: installs a default consistency-level policy
 * (read consistency ONE) if none was injected, builds the Hector access
 * credentials map, and opens the system keyspace.
 *
 * @throws Exception if the system keyspace cannot be created
 */
public void init() throws Exception {
    if (consistencyLevelPolicy == null) {
        consistencyLevelPolicy = new ConfigurableConsistencyLevel();
        ((ConfigurableConsistencyLevel) consistencyLevelPolicy)
                .setDefaultReadConsistencyLevel(HConsistencyLevel.ONE);
    }
    // Credentials passed to Hector on every keyspace creation.
    accessMap = new HashMap<String, String>(2);
    accessMap.put("username", properties.getProperty("cassandra.username"));
    accessMap.put("password", properties.getProperty("cassandra.password"));
    systemKeyspace = HFactory.createKeyspace(SYSTEM_KEYSPACE, cluster,
            consistencyLevelPolicy, ON_FAIL_TRY_ALL_AVAILABLE, accessMap);
}
/** @return the Hector cluster handle */
public Cluster getCluster() {
return cluster;
}
public void setCluster(Cluster cluster) {
this.cluster = cluster;
}
/** @return the host configurator (also the source of the cluster clock) */
public CassandraHostConfigurator getCassandraHostConfigurator() {
return chc;
}
public void setCassandraHostConfigurator(CassandraHostConfigurator chc) {
this.chc = chc;
}
public Properties getProperties() {
return properties;
}
public void setProperties(Properties properties) {
this.properties = properties;
}
/** @return the configuration as a Map, or null if no properties were set */
public Map getPropertiesMap() {
if (properties != null) {
return asMap(properties);
}
return null;
}
public LockManager getLockManager() {
return lockManager;
}
public void setLockManager(LockManager lockManager) {
this.lockManager = lockManager;
}
/** @return the consistency-level policy applied to all keyspaces */
public ConsistencyLevelPolicy getConsistencyLevelPolicy() {
return consistencyLevelPolicy;
}
public void setConsistencyLevelPolicy(
ConsistencyLevelPolicy consistencyLevelPolicy) {
this.consistencyLevelPolicy = consistencyLevelPolicy;
}
/**
 * Resolves the keyspace name that stores the given application's data.
 * With virtual keyspaces every application lives in the shared static
 * keyspace; otherwise each application gets its own keyspace named after
 * its UUID (dashes replaced with underscores).
 *
 * @param applicationId the application UUID
 * @return keyspace name for the application
 */
public static String keyspaceForApplication(UUID applicationId) {
    if (!USE_VIRTUAL_KEYSPACES) {
        return "Application_" + applicationId.toString().replace('-', '_');
    }
    return STATIC_APPLICATION_KEYSPACE;
}
/**
 * Returns the row-key prefix used inside the shared keyspace for this
 * application, or null when each application has a dedicated keyspace.
 *
 * @param applicationId the application UUID
 * @return the application UUID when virtual keyspaces are enabled, else null
 */
public static UUID prefixForApplication(UUID applicationId) {
    return USE_VIRTUAL_KEYSPACES ? applicationId : null;
}
/**
 * Opens a keyspace handle. A non-null prefix (with virtual keyspaces
 * enabled) yields a Hector virtual keyspace that transparently prefixes
 * row keys; otherwise a plain keyspace handle is returned.
 *
 * @param keyspace physical keyspace name
 * @param prefix row-key prefix, or null for a plain keyspace
 * @return the keyspace handle
 */
public Keyspace getKeyspace(String keyspace, UUID prefix) {
    if (USE_VIRTUAL_KEYSPACES && (prefix != null)) {
        return createVirtualKeyspace(keyspace, prefix, ue, cluster,
                consistencyLevelPolicy, ON_FAIL_TRY_ALL_AVAILABLE, accessMap);
    }
    return HFactory.createKeyspace(keyspace, cluster,
            consistencyLevelPolicy, ON_FAIL_TRY_ALL_AVAILABLE, accessMap);
}
/**
 * Opens the keyspace for an application, combining the keyspace-name and
 * prefix resolution helpers.
 *
 * @param applicationId the application UUID (must not be null)
 * @return the application's keyspace handle
 */
public Keyspace getApplicationKeyspace(UUID applicationId) {
    assert applicationId != null;
    return getKeyspace(keyspaceForApplication(applicationId),
            prefixForApplication(applicationId));
}
/**
 * Opens the Usergrid_Applications keyspace directly, without any
 * application row-key prefixing.
 *
 * @return handle to the shared static application keyspace
 */
public Keyspace getUsergridApplicationKeyspace() {
return getKeyspace(STATIC_APPLICATION_KEYSPACE, null);
}
/** @return the system keyspace handle created by {@link #init()} */
public Keyspace getSystemKeyspace() {
return systemKeyspace;
}
/**
 * Creates a keyspace and, optionally, its column families. Strategy class,
 * replication factor and strategy options come from cassandra.keyspace.*
 * properties. Creation failures are logged and swallowed (the keyspace/CF
 * usually already exists). When replication &gt; 1 or a non-simple strategy
 * is used (or cassandra.configuration.use_delay forces it), fixed delays
 * are inserted after each schema change to let it propagate.
 *
 * @param keyspace keyspace name to create
 * @param cf_defs column families to create in it, or null for none
 */
public void createKeyspace(String keyspace,
        List<ColumnFamilyDefinition> cf_defs) {
    logger.info("Creating keyspace: {}", keyspace);
    String strategy_class = getString(properties,
            "cassandra.keyspace.strategy",
            "org.apache.cassandra.locator.SimpleStrategy");
    logger.info("Using strategy: {}", strategy_class);
    int replication_factor = getIntValue(properties,
            "cassandra.keyspace.replication", 1);
    logger.info("Using replication (may be overriden by strategy options): {}", replication_factor);
    try {
        ThriftKsDef ks_def = (ThriftKsDef) HFactory
                .createKeyspaceDefinition(keyspace, strategy_class,
                        replication_factor,
                        new ArrayList<ColumnFamilyDefinition>());
        // Strategy options come from "cassandra.keyspace.strategy.options.*".
        @SuppressWarnings({ "unchecked", "rawtypes" })
        Map<String, String> strategy_options = filter((Map) properties,
                "cassandra.keyspace.strategy.options.", true);
        if (strategy_options.size() > 0) {
            logger.info("Strategy options: {}", mapToFormattedJsonString(strategy_options));
            ks_def.setStrategyOptions(strategy_options);
        }
        cluster.addKeyspace(ks_def);
    } catch (Throwable e) {
        // Deliberate best-effort: an existing keyspace is not an error here.
        logger.error("Exception while creating keyspace, {} - probably already exists", keyspace, e);
    }
    // Delay schema changes on multi-node / replicated setups so the schema
    // can settle; an explicit property overrides the heuristic.
    boolean delay_configuration = (!"org.apache.cassandra.locator.SimpleStrategy"
            .equals(strategy_class)) || (replication_factor > 1);
    if (properties.getProperty("cassandra.configuration.use_delay") != null) {
        delay_configuration = Boolean.parseBoolean(properties.getProperty("cassandra.configuration.use_delay"));
    }
    if (delay_configuration) {
        logger.info("Waiting 10s after keyspace creation");
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve interrupt status
        }
    }
    if (cf_defs != null) {
        for (ColumnFamilyDefinition cfDef : cf_defs) {
            try {
                cluster.addColumnFamily(cfDef);
            } catch (Throwable e) {
                logger.error(
                        "Exception while creating CF, {} - probably already exists", cfDef.getName(), e);
            }
            if (delay_configuration) {
                try {
                    logger.info("Waiting 2s after CF creation");
                    Thread.sleep(2000);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        }
    }
    if (delay_configuration) {
        try {
            logger.info("Waiting 5s before continuing setup");
            Thread.sleep(5000);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
}
/**
 * Logs the names of all keyspaces visible on the cluster. Failures to
 * describe the cluster are logged and ignored.
 */
public void checkKeyspaces() {
    List<KeyspaceDefinition> ksDefs = null;
    try {
        ksDefs = cluster.describeKeyspaces();
    } catch (Exception e) {
        db_logger.error("Unable to describe keyspaces", e);
    }
    if (ksDefs != null) {
        for (KeyspaceDefinition ksDef : ksDefs) {
            logger.info(ksDef.getName());
        }
    }
}
/**
 * Dumps the full schema (keyspaces, column families, column metadata) to
 * stdout for debugging. Describe failures are logged and ignored.
 */
public void logKeyspaces() {
    List<KeyspaceDefinition> ksDefs = null;
    try {
        ksDefs = cluster.describeKeyspaces();
    } catch (Exception e) {
        db_logger.error("Unable to describe keyspaces", e);
    }
    if (ksDefs != null) {
        for (KeyspaceDefinition ksDef : ksDefs) {
            System.out.println("Keyspace: " + ksDef.getName().toString());
            for (ColumnFamilyDefinition cf : ksDef.getCfDefs()) {
                System.out.println(" CF: " + cf.getName());
                System.out.println(" id: " + cf.getId());
                System.out.println(" comment: " + cf.getComment());
                System.out.println(" rowCacheSize: "
                        + cf.getRowCacheSize());
                System.out.println(" keyCacheSize: "
                        + cf.getKeyCacheSize());
                System.out.println(" readRepairChance: "
                        + cf.getReadRepairChance());
                System.out.println(" gcGraceSeconds: "
                        + cf.getGcGraceSeconds());
                System.out.println(" minCompactionThreshold: "
                        + cf.getMinCompactionThreshold());
                System.out.println(" maxCompactionThreshold: "
                        + cf.getMaxCompactionThreshold());
                System.out.println(" rowCacheSavePeriodInSeconds: "
                        + cf.getRowCacheSavePeriodInSeconds());
                System.out.println(" keyCacheSavePeriodInSeconds: "
                        + cf.getKeyCacheSavePeriodInSeconds());
                System.out.println(" memtableFlushAfterMins: "
                        + cf.getMemtableFlushAfterMins());
                System.out.println(" memtableThroughputInMb: "
                        + cf.getMemtableThroughputInMb());
                System.out.println(" memtableOperationsInMillions: "
                        + cf.getMemtableOperationsInMillions());
                System.out.println(" keyspaceName: "
                        + cf.getKeyspaceName());
                // NOTE(review): the two lines below both read getColumnType();
                // the "comparatorType" label looks like it should print the
                // comparator, not the column type — confirm against the
                // Hector ColumnFamilyDefinition API before changing output.
                System.out.println(" comparatorType: "
                        + cf.getColumnType());
                System.out.println(" columnType: "
                        + JsonUtils.mapToJsonString(cf.getColumnType()));
                System.out.println(" subComparatorType: "
                        + JsonUtils.mapToJsonString(cf
                                .getSubComparatorType()));
                System.out.println(" keyValidationClass: "
                        + cf.getKeyValidationClass());
                System.out.println(" columnMetadata:");
                for (ColumnDefinition column : cf.getColumnMetadata()) {
                    System.out.println(" name: "
                            + string(column.getName()));
                    System.out.println(" indexName: "
                            + column.getIndexName());
                    System.out.println(" validationClass: "
                            + column.getValidationClass());
                    System.out.println(" indexType: "
                            + column.getIndexType());
                }
                System.out.println(" defaultValidationClass: "
                        + cf.getDefaultValidationClass());
                System.out.println(" replicateOnWrite: "
                        + cf.isReplicateOnWrite());
            }
        }
    }
}
/**
* Gets the columns.
*
* @param keyspace
* the keyspace
* @param columnFamily
* the column family
* @param key
* the key
* @return columns
* @throws Exception
* the exception
*/
public List> getAllColumns(Keyspace ko,
Object columnFamily, Object key, Serializer nameSerializer,
Serializer valueSerializer) throws Exception {
if (db_logger.isInfoEnabled()) {
db_logger.info("getColumns cf={} key={}", columnFamily, key);
}
SliceQuery q = createSliceQuery(ko, be,
nameSerializer, valueSerializer);
q.setColumnFamily(columnFamily.toString());
q.setKey(bytebuffer(key));
q.setRange(null, null, false, ALL_COUNT);
QueryResult> r = q.execute();
ColumnSlice slice = r.get();
List> results = slice.getColumns();
if (db_logger.isInfoEnabled()) {
if (results == null) {
db_logger.info("getColumns returned null");
} else {
db_logger.info("getColumns returned {} columns",results.size());
}
}
return results;
}
public List> getAllColumns(Keyspace ko,
Object columnFamily, Object key) throws Exception {
return getAllColumns(ko, columnFamily, key, se, be);
}
/**
 * Gets the names of every column in a row, in column order.
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param key the row key
 * @return insertion-ordered set of column names
 * @throws Exception on query failure
 */
public Set<String> getAllColumnNames(Keyspace ko, Object columnFamily,
        Object key) throws Exception {
    List<HColumn<String, ByteBuffer>> columns = getAllColumns(ko,
            columnFamily, key);
    Set<String> set = new LinkedHashSet<String>();
    for (HColumn<String, ByteBuffer> column : columns) {
        set.add(column.getName());
    }
    return set;
}
/**
* Gets the columns.
*
* @param keyspace
* the keyspace
* @param columnFamily
* the column family
* @param key
* the key
* @param start
* the start
* @param finish
* the finish
* @param count
* the count
* @param reversed
* the reversed
* @return columns
* @throws Exception
* the exception
*/
public List> getColumns(Keyspace ko,
Object columnFamily, Object key, Object start, Object finish,
int count, boolean reversed) throws Exception {
if (db_logger.isDebugEnabled()) {
db_logger.debug("getColumns cf=" + columnFamily + " key=" + key
+ " start=" + start + " finish=" + finish + " count="
+ count + " reversed=" + reversed);
}
SliceQuery q = createSliceQuery(ko,
be, be, be);
q.setColumnFamily(columnFamily.toString());
q.setKey(bytebuffer(key));
ByteBuffer start_bytes = null;
if (start instanceof DynamicComposite) {
start_bytes = ((DynamicComposite) start).serialize();
} else if (start instanceof List) {
start_bytes = DynamicComposite.toByteBuffer((List) start);
} else {
start_bytes = bytebuffer(start);
}
ByteBuffer finish_bytes = null;
if (finish instanceof DynamicComposite) {
finish_bytes = ((DynamicComposite) finish).serialize();
} else if (finish instanceof List) {
finish_bytes = DynamicComposite.toByteBuffer((List) finish);
} else {
finish_bytes = bytebuffer(finish);
}
/*
* if (reversed) { q.setRange(finish_bytes, start_bytes, reversed,
* count); } else { q.setRange(start_bytes, finish_bytes, reversed,
* count); }
*/
q.setRange(start_bytes, finish_bytes, reversed, count);
QueryResult> r = q.execute();
ColumnSlice slice = r.get();
List> results = slice.getColumns();
if (db_logger.isDebugEnabled()) {
if (results == null) {
db_logger.debug("getColumns returned null");
} else {
db_logger.debug("getColumns returned " + results.size()
+ " columns");
}
}
return results;
}
public Map>> multiGetColumns(
Keyspace ko, Object columnFamily, List keys, Object start,
Object finish, int count, boolean reversed) throws Exception {
if (db_logger.isDebugEnabled()) {
db_logger.debug("multiGetColumns cf=" + columnFamily + " keys="
+ keys + " start=" + start + " finish=" + finish
+ " count=" + count + " reversed=" + reversed);
}
MultigetSliceQuery q = createMultigetSliceQuery(
ko, be, be, be);
q.setColumnFamily(columnFamily.toString());
q.setKeys(bytebuffers(keys));
ByteBuffer start_bytes = null;
if (start instanceof DynamicComposite) {
start_bytes = ((DynamicComposite) start).serialize();
} else if (start instanceof List) {
start_bytes = DynamicComposite.toByteBuffer((List) start);
} else {
start_bytes = bytebuffer(start);
}
ByteBuffer finish_bytes = null;
if (finish instanceof DynamicComposite) {
finish_bytes = ((DynamicComposite) finish).serialize();
} else if (finish instanceof List) {
finish_bytes = DynamicComposite.toByteBuffer((List) finish);
} else {
finish_bytes = bytebuffer(finish);
}
q.setRange(start_bytes, finish_bytes, reversed, count);
QueryResult> r = q.execute();
Rows rows = r.get();
Map>> results = new LinkedHashMap>>();
for (Row row : rows) {
results.put(row.getKey(), row.getColumnSlice().getColumns());
}
return results;
}
/**
 * Gets all columns (up to ALL_COUNT) for multiple rows in one round trip.
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param keys the row keys
 * @param keySerializer serializer for row keys
 * @param nameSerializer serializer for column names
 * @param valueSerializer serializer for column values
 * @return the fetched rows
 * @throws Exception on query failure
 */
public <K, N, V> Rows<K, N, V> getRows(Keyspace ko, Object columnFamily,
        List<K> keys, Serializer<K> keySerializer,
        Serializer<N> nameSerializer, Serializer<V> valueSerializer)
        throws Exception {
    if (db_logger.isDebugEnabled()) {
        db_logger.debug("getColumns cf=" + columnFamily + " keys=" + keys);
    }
    MultigetSliceQuery<K, N, V> q = createMultigetSliceQuery(ko,
            keySerializer, nameSerializer, valueSerializer);
    q.setColumnFamily(columnFamily.toString());
    q.setKeys(keys);
    q.setRange(null, null, false, ALL_COUNT);
    QueryResult<Rows<K, N, V>> r = q.execute();
    Rows<K, N, V> results = r.get();
    if (db_logger.isInfoEnabled()) {
        if (results == null) {
            db_logger.info("getColumns returned null");
        } else {
            db_logger.info("getColumns returned " + results.getCount()
                    + " columns");
        }
    }
    return results;
}
/**
* Gets the columns.
*
* @param keyspace
* the keyspace
* @param columnFamily
* the column family
* @param key
* the key
* @param columnNames
* the column names
* @return columns
* @throws Exception
* the exception
*/
@SuppressWarnings("unchecked")
public List> getColumns(Keyspace ko,
Object columnFamily, Object key, Set columnNames,
Serializer nameSerializer, Serializer valueSerializer)
throws Exception {
if (db_logger.isDebugEnabled()) {
db_logger.debug("getColumns cf=" + columnFamily + " key=" + key
+ " names=" + columnNames);
}
SliceQuery q = createSliceQuery(ko, be,
nameSerializer, valueSerializer);
q.setColumnFamily(columnFamily.toString());
q.setKey(bytebuffer(key));
// q.setColumnNames(columnNames.toArray(new String[0]));
q.setColumnNames((N[]) nameSerializer.fromBytesSet(
se.toBytesSet(new ArrayList(columnNames))).toArray());
QueryResult> r = q.execute();
ColumnSlice slice = r.get();
List> results = slice.getColumns();
if (db_logger.isInfoEnabled()) {
if (results == null) {
db_logger.info("getColumns returned null");
} else {
db_logger.info("getColumns returned " + results.size()
+ " columns");
}
}
return results;
}
/**
 * Gets a specific set of named columns from multiple rows in one round trip.
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param keys the row keys
 * @param columnNames the column names to fetch (as Strings, re-encoded
 *        through the name serializer)
 * @param keySerializer serializer for row keys
 * @param nameSerializer serializer for column names
 * @param valueSerializer serializer for column values
 * @return the fetched rows
 * @throws Exception on query failure
 */
@SuppressWarnings("unchecked")
public <K, N, V> Rows<K, N, V> getRows(Keyspace ko, Object columnFamily,
        List<K> keys, Set<String> columnNames, Serializer<K> keySerializer,
        Serializer<N> nameSerializer, Serializer<V> valueSerializer)
        throws Exception {
    if (db_logger.isDebugEnabled()) {
        db_logger.debug("getColumns cf=" + columnFamily + " keys=" + keys
                + " names=" + columnNames);
    }
    MultigetSliceQuery<K, N, V> q = createMultigetSliceQuery(ko,
            keySerializer, nameSerializer, valueSerializer);
    q.setColumnFamily(columnFamily.toString());
    q.setKeys(keys);
    // Round-trip the String names through bytes to match serializer type N.
    q.setColumnNames((N[]) nameSerializer.fromBytesSet(
            se.toBytesSet(new ArrayList<String>(columnNames))).toArray());
    QueryResult<Rows<K, N, V>> r = q.execute();
    Rows<K, N, V> results = r.get();
    if (db_logger.isInfoEnabled()) {
        if (results == null) {
            db_logger.info("getColumns returned null");
        } else {
            db_logger.info("getColumns returned " + results.getCount()
                    + " columns");
        }
    }
    return results;
}
/**
 * Gets a single column from a row.
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param key the row key
 * @param column the column name
 * @param nameSerializer serializer for the column name
 * @param valueSerializer serializer for the column value
 * @return the column, or null if it does not exist
 * @throws Exception on query failure
 */
public <N, V> HColumn<N, V> getColumn(Keyspace ko, Object columnFamily,
        Object key, N column, Serializer<N> nameSerializer,
        Serializer<V> valueSerializer) throws Exception {
    if (db_logger.isDebugEnabled()) {
        db_logger.debug("getColumn cf=" + columnFamily + " key=" + key
                + " column=" + column);
    }
    ColumnQuery<ByteBuffer, N, V> q = HFactory.createColumnQuery(ko, be,
            nameSerializer, valueSerializer);
    QueryResult<HColumn<N, V>> r = q.setKey(bytebuffer(key))
            .setName(column).setColumnFamily(columnFamily.toString())
            .execute();
    HColumn<N, V> result = r.get();
    if (db_logger.isInfoEnabled()) {
        if (result == null) {
            db_logger.info("getColumn returned null");
        }
    }
    return result;
}
/**
 * Gets an explicit array of named columns from one row.
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param key the row key
 * @param columns the column names to fetch
 * @param nameSerializer serializer for column names
 * @param valueSerializer serializer for column values
 * @return slice containing the matching columns
 * @throws Exception on query failure
 */
public <N, V> ColumnSlice<N, V> getColumns(Keyspace ko,
        Object columnFamily, Object key, N[] columns,
        Serializer<N> nameSerializer, Serializer<V> valueSerializer)
        throws Exception {
    if (db_logger.isDebugEnabled()) {
        db_logger.debug("getColumn cf=" + columnFamily + " key=" + key
                + " column=" + columns);
    }
    SliceQuery<ByteBuffer, N, V> q = HFactory.createSliceQuery(ko, be,
            nameSerializer, valueSerializer);
    QueryResult<ColumnSlice<N, V>> r = q.setKey(bytebuffer(key))
            .setColumnNames(columns)
            .setColumnFamily(columnFamily.toString()).execute();
    ColumnSlice<N, V> result = r.get();
    if (db_logger.isDebugEnabled()) {
        if (result == null) {
            db_logger.debug("getColumn returned null");
        }
    }
    return result;
}
/**
 * Gets a single String-named, ByteBuffer-valued column from a row.
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param key the row key
 * @param column the column name
 * @return the column, or null if it does not exist
 * @throws Exception on query failure
 */
public HColumn<String, ByteBuffer> getColumn(Keyspace ko,
        Object columnFamily, Object key, String column) throws Exception {
    return getColumn(ko, columnFamily, key, column, se, be);
}
/**
 * Writes a single column with no TTL (the column never expires).
 * Delegates to {@link #setColumn(Keyspace, Object, Object, Object, Object, int)}.
 */
public void setColumn(Keyspace ko, Object columnFamily, Object key,
Object columnName, Object columnValue) throws Exception {
this.setColumn(ko, columnFamily, key, columnName, columnValue, 0);
}
/**
 * Writes a single column immediately (no batching).
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param key the row key
 * @param columnName column name; a List is encoded as a DynamicComposite
 * @param columnValue column value; a List is encoded as a DynamicComposite
 * @param ttl time-to-live in seconds, or 0 for no expiration
 * @throws Exception on write failure
 */
public void setColumn(Keyspace ko, Object columnFamily, Object key,
        Object columnName, Object columnValue, int ttl) throws Exception {
    if (db_logger.isDebugEnabled()) {
        db_logger.debug("setColumn cf=" + columnFamily + " key=" + key
                + " name=" + columnName + " value=" + columnValue);
    }
    ByteBuffer name_bytes = null;
    if (columnName instanceof List) {
        name_bytes = DynamicComposite.toByteBuffer((List<?>) columnName);
    } else {
        name_bytes = bytebuffer(columnName);
    }
    ByteBuffer value_bytes = null;
    if (columnValue instanceof List) {
        value_bytes = DynamicComposite.toByteBuffer((List<?>) columnValue);
    } else {
        value_bytes = bytebuffer(columnValue);
    }
    HColumn<ByteBuffer, ByteBuffer> col = createColumn(name_bytes,
            value_bytes, be, be);
    if (ttl != 0) {
        col.setTtl(ttl);
    }
    Mutator<ByteBuffer> m = createMutator(ko, be);
    m.insert(bytebuffer(key), columnFamily.toString(), col);
}
/**
 * Writes a map of columns to one row with no TTL.
 * Delegates to {@link #setColumns(Keyspace, Object, byte[], Map, int)}.
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param key the row key
 * @param map column name to value; null values are skipped
 * @throws Exception on write failure
 */
public void setColumns(Keyspace ko, Object columnFamily, byte[] key,
Map map) throws Exception {
this.setColumns(ko, columnFamily, key, map, 0);
}
/**
 * Batch-writes a map of columns to one row, all with the same timestamp.
 * List-typed names/values are encoded as DynamicComposites; null values
 * are skipped. The batch is retried up to RETRY_COUNT times.
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param key the row key
 * @param map column name to value
 * @param ttl time-to-live in seconds applied to every column, or 0 for none
 * @throws Exception on write failure
 */
public void setColumns(Keyspace ko, Object columnFamily, byte[] key,
        Map<?, ?> map, int ttl) throws Exception {
    if (db_logger.isDebugEnabled()) {
        db_logger.debug("setColumns cf=" + columnFamily + " key=" + key
                + " map=" + map + (ttl != 0 ? " ttl=" + ttl : ""));
    }
    Mutator<ByteBuffer> m = createMutator(ko, be);
    long timestamp = createTimestamp();
    for (Object name : map.keySet()) {
        Object value = map.get(name);
        if (value != null) {
            ByteBuffer name_bytes = null;
            if (name instanceof List) {
                name_bytes = DynamicComposite.toByteBuffer((List<?>) name);
            } else {
                name_bytes = bytebuffer(name);
            }
            ByteBuffer value_bytes = null;
            if (value instanceof List) {
                value_bytes = DynamicComposite
                        .toByteBuffer((List<?>) value);
            } else {
                value_bytes = bytebuffer(value);
            }
            HColumn<ByteBuffer, ByteBuffer> col = createColumn(name_bytes,
                    value_bytes, timestamp, be, be);
            if (ttl != 0) {
                col.setTtl(ttl);
            }
            // BUG FIX: the original rebuilt the column here via a second
            // createColumn(...) call, discarding the TTL set above, so the
            // ttl parameter was silently ignored. Insert the configured col.
            m.addInsertion(bytebuffer(key), columnFamily.toString(), col);
        }
    }
    batchExecute(m, CassandraService.RETRY_COUNT);
}
/**
 * Create a timestamp based on the TimeResolution set to the cluster.
 * Uses the clock resolution configured on the CassandraHostConfigurator,
 * so all mutations issued by this service agree on timestamp granularity.
 *
 * @return a timestamp
 */
public long createTimestamp() {
return chc.getClockResolution().createClock();
}
/**
 * Deletes a single column from a row (executes immediately).
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param key the row key
 * @param column the column name
 * @throws Exception on delete failure
 */
public void deleteColumn(Keyspace ko, Object columnFamily, Object key,
        Object column) throws Exception {
    if (db_logger.isDebugEnabled()) {
        db_logger.debug("deleteColumn cf=" + columnFamily + " key=" + key
                + " name=" + column);
    }
    Mutator<ByteBuffer> m = createMutator(ko, be);
    m.delete(bytebuffer(key), columnFamily.toString(), bytebuffer(column),
            be);
}
/**
 * Gets every row key in a column family (fetching zero columns per row).
 * Note: scans the entire CF — intended for small/administrative CFs.
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param keySerializer serializer for row keys
 * @return insertion-ordered set of row keys
 * @throws Exception on query failure
 */
public <K> Set<K> getRowKeySet(Keyspace ko, Object columnFamily,
        Serializer<K> keySerializer) throws Exception {
    if (db_logger.isDebugEnabled()) {
        db_logger.debug("getRowKeys cf=" + columnFamily);
    }
    RangeSlicesQuery<K, ByteBuffer, ByteBuffer> q = createRangeSlicesQuery(
            ko, keySerializer, be, be);
    q.setColumnFamily(columnFamily.toString());
    q.setKeys(null, null);
    // Empty column-name list: we only want the keys, not any column data.
    q.setColumnNames(new ByteBuffer[0]);
    QueryResult<OrderedRows<K, ByteBuffer, ByteBuffer>> r = q.execute();
    OrderedRows<K, ByteBuffer, ByteBuffer> rows = r.get();
    Set<K> results = new LinkedHashSet<K>();
    for (Row<K, ByteBuffer, ByteBuffer> row : rows) {
        results.add(row.getKey());
    }
    if (db_logger.isDebugEnabled()) {
        db_logger.debug("getRowKeys returned " + results.size() + " rows");
    }
    return results;
}
/**
 * Gets every row key in a column family as a list (fetching zero columns
 * per row). Note: scans the entire CF.
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param keySerializer serializer for row keys
 * @return list of row keys in scan order
 * @throws Exception on query failure
 */
public <K> List<K> getRowKeyList(Keyspace ko, Object columnFamily,
        Serializer<K> keySerializer) throws Exception {
    RangeSlicesQuery<K, ByteBuffer, ByteBuffer> q = createRangeSlicesQuery(
            ko, keySerializer, be, be);
    q.setColumnFamily(columnFamily.toString());
    q.setKeys(null, null);
    // Empty column-name list: keys only, no column data.
    q.setColumnNames(new ByteBuffer[0]);
    QueryResult<OrderedRows<K, ByteBuffer, ByteBuffer>> r = q.execute();
    OrderedRows<K, ByteBuffer, ByteBuffer> rows = r.get();
    List<K> list = new ArrayList<K>();
    for (Row<K, ByteBuffer, ByteBuffer> row : rows) {
        list.add(row.getKey());
    }
    return list;
}
/**
 * Deletes an entire row (executes immediately).
 *
 * @param ko
 * the keyspace
 * @param columnFamily
 * the column family
 * @param key
 * the row key (converted to bytes)
 * @throws Exception
 * the exception
 */
public void deleteRow(Keyspace ko, final Object columnFamily,
final Object key) throws Exception {
if (db_logger.isDebugEnabled()) {
db_logger.debug("deleteRow cf=" + columnFamily + " key=" + key);
}
createMutator(ko, be).addDeletion(bytebuffer(key),
columnFamily.toString()).execute();
}
/**
 * Deletes an entire row addressed by a String key (executes immediately).
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param key the String row key (serialized with the String serializer)
 * @throws Exception on delete failure
 */
public void deleteRow(Keyspace ko, final Object columnFamily,
final String key) throws Exception {
if (db_logger.isDebugEnabled()) {
db_logger.debug("deleteRow cf=" + columnFamily + " key=" + key);
}
createMutator(ko, se).addDeletion(key, columnFamily.toString())
.execute();
}
/**
 * Deletes an entire row using an explicit tombstone timestamp
 * (executes immediately).
 *
 * @param ko
 * the keyspace
 * @param columnFamily
 * the column family
 * @param key
 * the row key (converted to bytes)
 * @param timestamp
 * the timestamp written on the deletion tombstone
 * @throws Exception
 * the exception
 */
public void deleteRow(Keyspace ko, final Object columnFamily,
final Object key, final long timestamp) throws Exception {
if (db_logger.isDebugEnabled()) {
db_logger.debug("deleteRow cf=" + columnFamily + " key=" + key
+ " timestamp=" + timestamp);
}
createMutator(ko, be).addDeletion(bytebuffer(key),
columnFamily.toString(), timestamp).execute();
}
/**
 * Gets the list of entity UUIDs for a collection by scanning the
 * ENTITY_ID_SETS index buckets. Column names in the index are the UUIDs.
 *
 * @param ko the keyspace (unused directly; the scanner uses this service)
 * @param key the index row key
 * @param start scan start (NULL_ID is treated as "from the beginning")
 * @param finish scan end
 * @param count maximum ids to return; values &lt;= 0 fall back to DEFAULT_COUNT
 * @param reversed true to scan in reverse order
 * @param locator index bucket locator
 * @param applicationId the application UUID
 * @param collectionName the collection whose ids are being read
 * @return list of entity UUIDs
 * @throws Exception on scan failure
 */
public List<UUID> getIdList(Keyspace ko, Object key, Object start,
        Object finish, int count, boolean reversed,
        IndexBucketLocator locator, UUID applicationId,
        String collectionName) throws Exception {
    if (count <= 0) {
        count = DEFAULT_COUNT;
    }
    List<UUID> ids = new ArrayList<UUID>();
    // The zero UUID is a sentinel for "no start bound".
    if (NULL_ID.equals(start)) {
        start = null;
    }
    IndexBucketScanner scanner = new IndexBucketScanner(this, locator,
            ENTITY_ID_SETS, applicationId, IndexType.COLLECTION, key,
            start, finish, reversed, count, collectionName);
    // NOTE(review): assumes scanner.load() yields ByteBuffer-named columns
    // — confirm against IndexBucketScanner's declaration.
    List<HColumn<ByteBuffer, ByteBuffer>> results = scanner.load();
    if (results != null) {
        for (HColumn<ByteBuffer, ByteBuffer> result : results) {
            ids.add(uuid(result.getName()));
        }
    }
    return ids;
}
/**
 * Counts the columns in a row (server-side count, capped at 100,000,000).
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param key the row key
 * @return number of columns, or 0 if the query returned no result
 * @throws Exception on query failure
 */
public int countColumns(Keyspace ko, Object columnFamily, Object key)
        throws Exception {
    CountQuery<ByteBuffer, ByteBuffer> cq = HFactory.createCountQuery(ko,
            be, be);
    cq.setColumnFamily(columnFamily.toString());
    cq.setKey(bytebuffer(key));
    // Empty buffers as bounds mean "the whole row".
    cq.setRange(ByteBuffer.allocate(0), ByteBuffer.allocate(0), 100000000);
    QueryResult<Integer> r = cq.execute();
    if (r == null) {
        return 0;
    }
    return r.get();
}
/**
 * Batch-writes an id list into the ENTITY_ID_SETS column family under
 * keys built from the prefix/suffix. Retried up to RETRY_COUNT times.
 *
 * @param ko the keyspace
 * @param targetId the target entity id the list points at
 * @param keyPrefix prefix for the index row key
 * @param keySuffix suffix for the index row key
 * @param keyIds the ids to record
 * @throws Exception on write failure
 */
public void setIdList(Keyspace ko, UUID targetId, String keyPrefix,
        String keySuffix, List<UUID> keyIds) throws Exception {
    long timestamp = createTimestamp();
    Mutator<ByteBuffer> batch = createMutator(ko, be);
    batch = buildSetIdListMutator(batch, targetId,
            ENTITY_ID_SETS.toString(), keyPrefix, keySuffix, keyIds,
            timestamp);
    batchExecute(batch, CassandraService.RETRY_COUNT);
}
// Last observed cluster reachability; updated by the health-check task below.
boolean clusterUp = false;
/**
 * Starts a background task that every 5 seconds (after a 1s initial delay)
 * marks the cluster up when the connection manager knows at least one host.
 * NOTE(review): the executor is a non-daemon single thread and is never
 * shut down, so it can keep the JVM alive — confirm intended lifecycle.
 */
public void startClusterHealthCheck() {
ScheduledExecutorService executorService = Executors
.newSingleThreadScheduledExecutor();
executorService.scheduleWithFixedDelay(new Runnable() {
@Override
public void run() {
if (cluster != null) {
HConnectionManager connectionManager = cluster
.getConnectionManager();
if (connectionManager != null) {
// Up iff at least one host is currently known to the pool.
clusterUp = !connectionManager.getHosts().isEmpty();
}
}
}
}, 1, 5, TimeUnit.SECONDS);
}
}