
package com.alachisoft.ncache.client.internal.datastructure;

import Alachisoft.NCache.Caching.Util.CollectionUtil;
import Alachisoft.NCache.Common.DataTypes.CollectionCreateOrGetOperation;
import Alachisoft.NCache.Common.ErrorHandling.ErrorCodes;
import Alachisoft.NCache.Common.ErrorHandling.ErrorMessages;
import com.alachisoft.ncache.client.CacheItem;
import com.alachisoft.ncache.client.datastructures.*;
import com.alachisoft.ncache.client.internal.caching.CacheImpl;
import com.alachisoft.ncache.client.internal.util.ConversionUtil;
import com.alachisoft.ncache.runtime.caching.*;
import com.alachisoft.ncache.runtime.caching.DistributedDataStructure;
import com.alachisoft.ncache.runtime.exceptions.CacheException;
import com.alachisoft.ncache.runtime.exceptions.runtime.OperationFailedRuntimeException;
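
/**
 * Internal implementation of {@link DataStructureManager} that creates, looks up, and removes
 * NCache distributed data structures (counters, maps, hash sets, lists, and queues) through an
 * underlying {@link CacheImpl}.
 *
 * <p>Illustrative usage sketch (assumes {@code cache} is an already-initialized {@code CacheImpl};
 * the key names are examples only):</p>
 * <pre>{@code
 * DataStructureManager manager = new DataStructureManagerImpl(cache);
 * Counter counter = manager.createCounter("counterKey", 10);
 * DistributedList<String> list = manager.createList("listKey", String.class);
 * manager.remove("counterKey");
 * }</pre>
 */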
public class DataStructureManagerImpl implements DataStructureManager {

    private CacheImpl _cache;

    public DataStructureManagerImpl(CacheImpl cache) {
        _cache = cache;
    }
    public final CacheImpl getCache() {
        return _cache;
    }
    public void setCache(CacheImpl value) {
        _cache = value;
    }
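
    /**
     * Creates a distributed counter under the given key with an initial value of 0 by delegating
     * to {@link #createCounter(String, DataStructureAttributes, long, WriteThruOptions)} with
     * default attributes and no write-through options.
     */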
    @Override
    public Counter createCounter(String key) throws CacheException {
        return createCounter(key, null, 0, null);
    }
    @Override
    public Counter createCounter(String key, long initialValue) throws CacheException, IllegalArgumentException {
        return createCounter(key, null, initialValue, null);
    }
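
    /**
     * Creates a distributed counter under the given key with the supplied attributes, initial
     * value, and write-through options, and returns a client-side handle to it.
     */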
    @Override
    public Counter createCounter(String key, DataStructureAttributes attributes, long initialValue, WriteThruOptions options) throws CacheException {
        createCounterInternal(key, attributes, initialValue, options);
        return new CounterImpl(key, getCache(), options, null);
    }
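
    /**
     * Creates a distributed map under the given key with default attributes and no write-through
     * options. {@code cls} must not be {@code null}.
     */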
    @Override
    public <K, V> DistributedMap<K, V> createMap(String key, Class<?> cls) throws CacheException {
        return createMap(key, null, null, cls);
    }
    @Override
    public <K, V> DistributedMap<K, V> createMap(String key, DataStructureAttributes attributes, WriteThruOptions options, Class<?> cls) throws CacheException {
        if (cls == null)
            throw new IllegalArgumentException("Value cannot be null." + System.lineSeparator() + "Parameter name: cls");
        createInternal(key, attributes, options, DistributedDataStructure.Map, cls);
        return new DistributedMapImpl<K, V>(key, _cache, options, cls);
    }
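
    /**
     * Creates a distributed hash set under the given key with default attributes and no
     * write-through options. {@code cls} must not be {@code null} and is additionally validated
     * by {@link CollectionUtil#validateTypeForHashSetOnCreation}.
     */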
    @Override
    public <V> DistributedHashSet<V> createHashSet(String key, Class<?> cls) throws CacheException {
        return createHashSet(key, null, null, cls);
    }
    @Override
    public <V> DistributedHashSet<V> createHashSet(String key, DataStructureAttributes attributes, WriteThruOptions options, Class<?> cls) throws CacheException {
        if (cls == null)
            throw new IllegalArgumentException("Value cannot be null." + System.lineSeparator() + "Parameter name: cls");
        CollectionUtil.validateTypeForHashSetOnCreation(cls);
        createInternal(key, attributes, options, DistributedDataStructure.Set, cls);
        return new DistributedHashSetImpl<V>(key, getCache(), options, cls);
    }
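
    /**
     * Creates a distributed list under the given key with default attributes and no write-through
     * options. {@code cls} must not be {@code null}.
     */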
    @Override
    public <V> DistributedList<V> createList(String key, Class<?> cls) throws CacheException {
        return createList(key, null, null, cls);
    }
    @Override
    public <V> DistributedList<V> createList(String key, DataStructureAttributes attributes, WriteThruOptions options, Class<?> cls) throws CacheException {
        if (cls == null)
            throw new IllegalArgumentException("Value cannot be null." + System.lineSeparator() + "Parameter name: cls");
        createInternal(key, attributes, options, DistributedDataStructure.List, cls);
        return new DistributedListImpl<V>(key, getCache(), options, cls);
    }
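
    /**
     * Creates a distributed queue under the given key with default attributes and no
     * write-through options. {@code cls} must not be {@code null}.
     */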
    @Override
    public <V> DistributedQueue<V> createQueue(String key, Class<?> cls) throws CacheException {
        return createQueue(key, null, null, cls);
    }
    @Override
    public <V> DistributedQueue<V> createQueue(String key, DataStructureAttributes attributes, WriteThruOptions options, Class<?> cls) throws CacheException {
        if (cls == null)
            throw new IllegalArgumentException("Value cannot be null." + System.lineSeparator() + "Parameter name: cls");
        createInternal(key, attributes, options, DistributedDataStructure.Queue, cls);
        return new DistributedQueueImpl<V>(key, getCache(), options, cls);
    }
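
    /**
     * Looks up an existing distributed counter. Returns a client-side handle if the cache reports
     * that the counter exists under the given key, or {@code null} otherwise.
     */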
    @Override
    public Counter getCounter(String key) throws CacheException {
        return getCounter(key, null);
    }
    @Override
    public Counter getCounter(String key, ReadThruOptions options) throws CacheException {
        if (getInternal(key, options, DistributedDataStructure.Counter))
            return new CounterImpl(key, getCache(), null, null);
        return null;
    }
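
    /**
     * Looks up an existing distributed map. Returns a client-side handle if the cache reports
     * that the map exists under the given key, or {@code null} otherwise.
     */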
    @Override
    public <K, V> DistributedMap<K, V> getMap(String key, Class<?> cls) throws CacheException {
        return getMap(key, null, cls);
    }
    @Override
    public <K, V> DistributedMap<K, V> getMap(String key, ReadThruOptions options, Class<?> cls) throws CacheException {
        if (getInternal(key, options, DistributedDataStructure.Map))
            return new DistributedMapImpl<K, V>(key, getCache(), null, cls);
        return null;
    }
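
    /**
     * Looks up an existing distributed hash set. Returns a client-side handle if the cache
     * reports that the set exists under the given key, or {@code null} otherwise.
     */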
    @Override
    public <V> DistributedHashSet<V> getHashSet(String key, Class<?> cls) throws CacheException {
        return getHashSet(key, null, cls);
    }
    @Override
    public <V> DistributedHashSet<V> getHashSet(String key, ReadThruOptions options, Class<?> cls) throws CacheException {
        if (getInternal(key, options, DistributedDataStructure.Set))
            return new DistributedHashSetImpl<V>(key, getCache(), null, cls);
        return null;
    }
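
    /**
     * Looks up an existing distributed list. Returns a client-side handle if the cache reports
     * that the list exists under the given key, or {@code null} otherwise.
     */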
    @Override
    public <V> DistributedList<V> getList(String key, Class<?> cls) throws CacheException {
        return getList(key, null, cls);
    }
    @Override
    public <V> DistributedList<V> getList(String key, ReadThruOptions options, Class<?> cls) throws CacheException {
        if (getInternal(key, options, DistributedDataStructure.List))
            return new DistributedListImpl<V>(key, getCache(), null, cls);
        return null;
    }
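
    /**
     * Looks up an existing distributed queue. Returns a client-side handle if the cache reports
     * that the queue exists under the given key, or {@code null} otherwise.
     */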
    @Override
    public <V> DistributedQueue<V> getQueue(String key, Class<?> cls) throws CacheException {
        return getQueue(key, null, cls);
    }
    @Override
    public <V> DistributedQueue<V> getQueue(String key, ReadThruOptions options, Class<?> cls) throws CacheException {
        if (getInternal(key, options, DistributedDataStructure.Queue))
            return new DistributedQueueImpl<V>(key, getCache(), null, cls);
        return null;
    }
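
    /**
     * Removes the data structure stored under the given key by deleting the key from the cache,
     * optionally applying write-through options.
     */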
    @Override
    public void remove(String key) throws CacheException {
        this.remove(key, null);
    }
    @Override
    public void remove(String key, WriteThruOptions writeThruOptions) throws CacheException {
        getCache().delete(key, null, null, writeThruOptions);
    }
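
    /**
     * Validates the key and expiration attributes, applies a default {@link WriteMode#None}
     * write-through option when none is given, and issues the counter create-or-get operation
     * against the cache.
     */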
    private void createCounterInternal(String key, DataStructureAttributes attributes, long initialValue, WriteThruOptions options) throws CacheException {
        if (key == null || key.isEmpty()) {
            throw new IllegalArgumentException("Value cannot be null or empty." + System.lineSeparator() + "Parameter name: key");
        }
        if (attributes != null && attributes.getExpiration() != null
                && ConversionUtil.getExpirationAbsoluteInternal(attributes.getExpiration()) != CacheImpl.NoAbsoluteExpiration
                && ConversionUtil.getExpirationSlidingInternal(attributes.getExpiration()) != CacheImpl.NoSlidingExpiration) {
            throw new IllegalArgumentException("You cannot set both sliding and absolute expirations on a single item");
        }
        if (options == null) {
            options = new WriteThruOptions(WriteMode.None);
        }
        CollectionCreateOrGetOperation operation = new CollectionCreateOrGetOperation(key, DistributedDataStructure.Counter);
        operation.setWriteThruOptions(options);
        operation.setInitialValue(initialValue);
        getCache().createDataType(operation, attributes);
    }
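
    /**
     * Shared creation path for maps, hash sets, lists, and queues: validates the key, expiration
     * attributes, and element type, then issues the create-or-get operation for the requested
     * {@link DistributedDataStructure} type.
     */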
    private void createInternal(String key, DataStructureAttributes attributes, WriteThruOptions options, DistributedDataStructure type, Class<?> cls) throws CacheException {
        if (key == null || key.isEmpty()) {
            throw new IllegalArgumentException("Value cannot be empty or null." + System.lineSeparator() + "Parameter name: key");
        }
        if (attributes != null && attributes.getExpiration() != null
                && ConversionUtil.getExpirationAbsoluteInternal(attributes.getExpiration()) != CacheImpl.NoAbsoluteExpiration
                && ConversionUtil.getExpirationSlidingInternal(attributes.getExpiration()) != CacheImpl.NoSlidingExpiration) {
            throw new IllegalArgumentException("You cannot set both sliding and absolute expirations on a single item");
        }
        validateTypeOfDistributedDataType(cls);
        if (options == null) {
            options = new WriteThruOptions(WriteMode.None);
        }
        CollectionCreateOrGetOperation operation = new CollectionCreateOrGetOperation(key, type);
        operation.setWriteThruOptions(options);
        getCache().createDataType(operation, attributes);
    }
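
    /**
     * Shared lookup path: validates the key, applies a default {@link ReadMode#None} read-through
     * option when none is given, and asks the cache whether a data structure of the given type
     * exists under the key.
     */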
    private boolean getInternal(String key, ReadThruOptions options, DistributedDataStructure type) throws CacheException {
        if (key == null || key.isEmpty()) {
            throw new IllegalArgumentException("Value cannot be null or empty." + System.lineSeparator() + "Parameter name: key");
        }
        if (options == null)
            options = new ReadThruOptions(ReadMode.None);
        CollectionCreateOrGetOperation op = new CollectionCreateOrGetOperation(key, type);
        op.setReadThruOptions(options);
        return getCache().getDataType(op);
    }
    private void validateTypeOfDistributedDataType(Class<?> cls) {
        // TODO 5.0 SP3: validateTypeOfDistributedDataType
        validateTypeNotCacheItem(cls);
    }

    private void validateTypeNotCacheItem(java.lang.Class<?> type) {
        if (type.equals(CacheItem.class)) {
            throw new OperationFailedRuntimeException(ErrorMessages.getErrorMessage(ErrorCodes.DataTypes.CACHEITEM_IN_DATA_STRUCTURES));
        }
    }
}