![JAR search and dependency download from the Maven repository](/logo.png)
com.tangosol.coherence.jcache.partitionedcache.PartitionedCacheBinaryEntryStore Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of coherence-jcache Show documentation
Oracle Coherence Community Edition
/*
* Copyright (c) 2000, 2021, Oracle and/or its affiliates.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* http://oss.oracle.com/licenses/upl.
*/
package com.tangosol.coherence.jcache.partitionedcache;
import com.oracle.coherence.common.base.Logger;
import com.tangosol.coherence.jcache.common.Helper;
import com.tangosol.coherence.jcache.common.JCacheContext;
import com.tangosol.coherence.jcache.common.JCacheEntryMetaInf;
import com.tangosol.coherence.jcache.common.JCacheIdentifier;
import com.tangosol.coherence.jcache.partitionedcache.processors.BinaryEntryHelper;
import com.tangosol.net.BackingMapManagerContext;
import com.tangosol.net.cache.BinaryEntryStore;
import com.tangosol.util.Binary;
import com.tangosol.util.BinaryEntry;
import com.tangosol.util.Converter;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import javax.cache.Cache;
import javax.cache.expiry.ExpiryPolicy;
import javax.cache.integration.CacheLoader;
import javax.cache.integration.CacheLoaderException;
import javax.cache.integration.CacheWriter;
import javax.cache.integration.CacheWriterException;
/**
* Generic Coherence BinaryEntryStore for Coherence JCache Adapter.
*
* @param key type
* @param value type
*
* @version Coherence 12.1.3
* @author jf 2013.07.08
*/
public class PartitionedCacheBinaryEntryStore
implements BinaryEntryStore
{
// ----- Constructors ---------------------------------------------------
/**
* Construct a native Coherence CacheStore which implements JCache read-through and write-through semantics.
*
* @param sName internal Coherence NamedCache cache name. encodes the JCache CacheManager context.
* @param mgrCtx Coherence context for NamedCache
* @param classLoader classLoader used by the Coherence NamedCache
*
* Only gets called if coherence configuration file defines <cache-scheme> element referring to this class.
*
* Here is the configuration that is now injected into coherence configuration files using the JCache namespace.
*
* {@code
* <cachestore-scheme>
* <class-scheme>
* <class-name>com.tangosol.coherence.jcache.partitionedcache.PartitionedCacheBinaryEntryStore</class-name>
* <init-params>
* <init-param>
* <param-type>java.lang.String</param-type>
* <param-value>{cache-name}</param-value>
* </init-param>
* <init-param>
* <param-type>com.tangosol.net.BackingMapManagerContext</param-type>
* <param-value>{manager-context}</param-value>
* </init-param>
* <init-param>
* <param-type>java.lang.ClassLoader</param-type>
* <param-value>{class-loader}</param-value>
* </init-param>
* </init-params>
* </class-scheme>
* </cachestore-scheme>
* }
*/
public PartitionedCacheBinaryEntryStore(String sName, BackingMapManagerContext mgrCtx, ClassLoader classLoader)
    {
    // Decode the JCache identity (CacheManager context + cache name) that is
    // encoded into the internal Coherence cache name. The mgrCtx and
    // classLoader parameters are required by the configured <init-params>
    // signature but are not retained by this store.
    JCacheIdentifier cacheId = new JCacheIdentifier(sName);

    m_cacheId = cacheId;

    Logger.finest(() -> "Created PartitionedCacheBinaryEntryStore for [name=" + sName + " JCacheId="
                  + cacheId.getCanonicalCacheName() + "]");
    }
// ----- BinaryEntryStore methods ---------------------------------------
@Override
public void load(BinaryEntry binaryEntry)
    {
    // Capture the time first so entry expiry is measured from the moment the
    // read-through began, not from when the loader returned.
    long        ldtStart = Helper.getCurrentTimeMillis();
    CacheLoader loader   = getReadThroughCacheLoader(binaryEntry);

    if (loader == null)
        {
        // read-through is not configured for this cache
        return;
        }

    Object oValue = loader.load(binaryEntry.getKey());

    if (oValue == null)
        {
        // the external store has no mapping for this key; leave the entry absent
        return;
        }

    BackingMapManagerContext ctx      = binaryEntry.getContext();
    Binary                   binValue = (Binary) ctx.getValueToInternalConverter().convert(oValue);
    JCacheEntryMetaInf       metaInf  = new JCacheEntryMetaInf(ldtStart, getExpiryPolicy(binaryEntry));

    binValue = BinaryEntryHelper.decorateBinValueWithJCacheMetaInf(binValue, metaInf, ctx);

    // mark the value as synthetically loaded so that write-through does not
    // echo a just-loaded entry straight back to the external store
    binValue = BinaryEntryHelper.decorateUpdateJCacheSynthetic(binValue, ctx,
        BinaryEntryHelper.JCACHE_SYNTHETIC_LOADED);

    binaryEntry.updateBinaryValue(binValue);

    Logger.finest(() -> "PartitionedCacheBinaryEntryStore.load loaded key=" + binaryEntry.getKey()
                  + " value=" + oValue);
    }
@Override
public void loadAll(Set set)
    {
    // Capture the time first so entry expiry is measured from the moment the
    // read-through began.
    long ldtStart   = Helper.getCurrentTimeMillis();
    Set  binEntries = (Set) set;

    if (binEntries.isEmpty())
        {
        return;
        }

    // all entries in the set belong to the same cache, so any one of them can
    // supply the loader, expiry policy and converter
    BinaryEntry aBinEntry = binEntries.iterator().next();
    CacheLoader loader    = getReadThroughCacheLoader(aBinEntry);

    if (loader == null)
        {
        // read-through is not configured for this cache
        return;
        }

    try
        {
        JCacheEntryMetaInf metaInf             = new JCacheEntryMetaInf(ldtStart, getExpiryPolicy(aBinEntry));
        Converter          toInternalConverter = aBinEntry.getContext().getValueToInternalConverter();
        Map                loadedMap           = loader.loadAll(new KeyIterable(binEntries));

        for (BinaryEntry binEntry : binEntries)
            {
            Object oValue = loadedMap.get(binEntry.getKey());

            // only materialize values for keys that are still absent and that
            // the loader actually resolved
            if (!binEntry.isPresent() && oValue != null)
                {
                Binary binValue = (Binary) toInternalConverter.convert(oValue);

                binValue = BinaryEntryHelper.decorateBinValueWithJCacheMetaInf(binValue, metaInf,
                    binEntry.getContext());

                // next line ensures that a just loaded entry is not written back with write-through.
                binValue = BinaryEntryHelper.decorateUpdateJCacheSynthetic(binValue, binEntry.getContext(),
                    BinaryEntryHelper.JCACHE_SYNTHETIC_LOADED);

                binEntry.updateBinaryValue(binValue);
                }
            }
        }
    catch (CacheLoaderException e)
        {
        // already the exception type mandated by the JCache spec; do not
        // double-wrap it (the original catch (Throwable) did, and also
        // wrapped Errors such as OutOfMemoryError, which must propagate)
        throw e;
        }
    catch (Exception e)
        {
        throw new CacheLoaderException(e);
        }
    }
@Override
public void store(BinaryEntry binaryEntry)
    {
    CacheWriter writer = getCacheWriter(binaryEntry);

    // skip when write-through is not configured, and skip entries marked as
    // JCache-synthetic or read-through-loaded: those updates must not be
    // echoed back to the external store
    if (writer == null || BinaryEntryHelper.isJCacheSyntheticOrLoaded(binaryEntry))
        {
        return;
        }

    writer.write(new CacheEntry(binaryEntry));
    }
@Override
public void storeAll(Set set)
{
if (set.isEmpty())
{
return;
}
Set binEntries = (Set) set;
// all entries belong to the same cache, so any entry can supply the writer
CacheWriter writer = getCacheWriter(binEntries.iterator().next());
if (writer != null)
{
Iterator iter = binEntries.iterator();
// remove jcache synthetic updates and readThrough loaded entries
// from the parameter set itself: Coherence treats entries left in the
// set as "not stored", and these must never reach the external store
while (iter.hasNext())
{
if (BinaryEntryHelper.isJCacheSyntheticOrLoaded(iter.next()))
{
iter.remove();
}
}
// writeAll remainder
if (set.size() != 0)
{
// wrap each BinaryEntry as a JCache Cache.Entry for CacheWriter.writeAll;
// per the JCache contract the writer removes each entry from this
// collection as it is successfully written
List jcacheEntries = new ArrayList(set.size());
for (BinaryEntry binEntry : binEntries )
{
jcacheEntries.add(new CacheEntry(binEntry));
}
try
{
writer.writeAll(jcacheEntries);
// the parameter set must have all entries removed if
// they were all successfully written.
set.clear();
}
catch (RuntimeException e)
{
// handle partial writeAll interrupted by an exception.
// all entries remaining in jcacheEntries were not written.
// ensure that parameter set has same members in it that
// jcacheEntries has. this represents the entries
// that were not written due to exception.
set.clear();
for (Cache.Entry entry : jcacheEntries)
{
set.add(entry.unwrap(BinaryEntry.class));
}
throw e;
}
}
}
}
@Override
public void erase(BinaryEntry binaryEntry)
    {
    CacheWriter writer = getCacheWriter(binaryEntry);

    if (writer == null)
        {
        // write-through is not configured; nothing to erase externally
        return;
        }

    Logger.finest(() -> "PartitionedCacheBinaryEntryStore.erase calling CacheWriter.delete on key="
                  + binaryEntry.getKey() + "CacheWriter class=" + writer.getClass().getCanonicalName());

    try
        {
        writer.delete(binaryEntry.getKey());
        }
    catch (UnsupportedOperationException e)
        {
        // Coherence interprets a raw UnsupportedOperationException from a
        // store as "write-through erase unsupported" and would let the
        // underlying entry.remove() proceed anyway. JCache instead requires
        // the removal to be reverted when the writer fails, so wrap the
        // exception in CacheWriterException — exactly as specified for
        // CacheWriter.delete(K) — which makes Coherence roll the remove back.
        throw new CacheWriterException("CacheWriter implementation " + writer.getClass().getCanonicalName()
                                       + ".delete threw an exception", e);
        }
    }
@Override
public void eraseAll(Set setBinEntries)
{
if (setBinEntries.isEmpty())
{
return;
}
CacheWriter writer = getCacheWriter((BinaryEntry) setBinEntries.iterator().next());
if (writer != null)
{
Set
© 2015 - 2025 Weber Informatics LLC | Privacy Policy