All Downloads are FREE. Search and download functionalities use the official Maven repository.

com.feilong.lib.springframework.util.ConcurrentReferenceHashMap Maven / Gradle / Ivy

Go to download

feilong is a suite of core and expanded libraries that include utility classes, http, excel, csv, io classes, and much much more.

There is a newer version: 4.3.0
Show newest version
/*
 * Copyright 2002-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.feilong.lib.springframework.util;

import java.lang.ref.ReferenceQueue;
import java.lang.ref.SoftReference;
import java.lang.ref.WeakReference;
import java.lang.reflect.Array;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReentrantLock;

/**
 * A {@link ConcurrentHashMap} that uses {@link ReferenceType#SOFT soft} or
 * {@linkplain ReferenceType#WEAK weak} references for both {@code keys} and {@code values}.
 *
 * 

* This class can be used as an alternative to * {@code Collections.synchronizedMap(new WeakHashMap>())} in order to * support better performance when accessed concurrently. This implementation follows the * same design constraints as {@link ConcurrentHashMap} with the exception that * {@code null} values and {@code null} keys are supported. * *

* NOTE: The use of references means that there is no guarantee that items * placed into the map will be subsequently available. The garbage collector may discard * references at any time, so it may appear that an unknown thread is silently removing * entries. * *

* If not explicitly specified, this implementation will use * {@linkplain SoftReference soft entry references}. * * @author Phillip Webb * @author Juergen Hoeller * @since 3.2 * @param * the key type * @param * the value type */ public class ConcurrentReferenceHashMap extends AbstractMap implements ConcurrentMap{ private static final int DEFAULT_INITIAL_CAPACITY = 16; private static final float DEFAULT_LOAD_FACTOR = 0.75f; private static final int DEFAULT_CONCURRENCY_LEVEL = 16; private static final ReferenceType DEFAULT_REFERENCE_TYPE = ReferenceType.SOFT; private static final int MAXIMUM_CONCURRENCY_LEVEL = 1 << 16; private static final int MAXIMUM_SEGMENT_SIZE = 1 << 30; /** * Array of segments indexed using the high order bits from the hash. */ private final Segment[] segments; /** * When the average number of references per table exceeds this value resize will be attempted. */ private final float loadFactor; /** * The reference type: SOFT or WEAK. */ private final ReferenceType referenceType; /** * The shift value used to calculate the size of the segments array and an index from the hash. */ private final int shift; /** * Late binding entry set. */ private volatile Set> entrySet; /** * Create a new {@code ConcurrentReferenceHashMap} instance. * * @param initialCapacity * the initial capacity of the map */ public ConcurrentReferenceHashMap(int initialCapacity){ this(initialCapacity, DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL, DEFAULT_REFERENCE_TYPE); } /** * Create a new {@code ConcurrentReferenceHashMap} instance. * * @param initialCapacity * the initial capacity of the map * @param loadFactor * the load factor. When the average number of references per * table exceeds this value, resize will be attempted. 
* @param concurrencyLevel * the expected number of threads that will concurrently * write to the map * @param referenceType * the reference type used for entries (soft or weak) */ @SuppressWarnings("unchecked") public ConcurrentReferenceHashMap(int initialCapacity, float loadFactor, int concurrencyLevel, ReferenceType referenceType){ Assert.isTrue(initialCapacity >= 0, "Initial capacity must not be negative"); Assert.isTrue(loadFactor > 0f, "Load factor must be positive"); Assert.isTrue(concurrencyLevel > 0, "Concurrency level must be positive"); Assert.notNull(referenceType, "Reference type must not be null"); this.loadFactor = loadFactor; this.shift = calculateShift(concurrencyLevel, MAXIMUM_CONCURRENCY_LEVEL); int size = 1 << this.shift; this.referenceType = referenceType; int roundedUpSegmentCapacity = (int) ((initialCapacity + size - 1L) / size); this.segments = (Segment[]) Array.newInstance(Segment.class, size); for (int i = 0; i < this.segments.length; i++){ this.segments[i] = new Segment(roundedUpSegmentCapacity); } } protected final float getLoadFactor(){ return this.loadFactor; } protected final Segment getSegment(int index){ return this.segments[index]; } /** * Factory method that returns the {@link ReferenceManager}. * This method will be called once for each {@link Segment}. * * @return a new reference manager */ protected ReferenceManager createReferenceManager(){ return new ReferenceManager(); } /** * Get the hash for a given object, apply an additional hash function to reduce * collisions. This implementation uses the same Wang/Jenkins algorithm as * {@link ConcurrentHashMap}. Subclasses can override to provide alternative hashing. * * @param o * the object to hash (may be null) * @return the resulting hash code */ protected int getHash(Object o){ int hash = (o != null ? 
o.hashCode() : 0); hash += (hash << 15) ^ 0xffffcd7d; hash ^= (hash >>> 10); hash += (hash << 3); hash ^= (hash >>> 6); hash += (hash << 2) + (hash << 14); hash ^= (hash >>> 16); return hash; } @Override public V get(Object key){ Entry entry = getEntryIfAvailable(key); return (entry != null ? entry.getValue() : null); } @Override public V getOrDefault(Object key,V defaultValue){ Entry entry = getEntryIfAvailable(key); return (entry != null ? entry.getValue() : defaultValue); } @Override public boolean containsKey(Object key){ Entry entry = getEntryIfAvailable(key); return (entry != null && ObjectUtils.nullSafeEquals(entry.getKey(), key)); } private Entry getEntryIfAvailable(Object key){ Reference ref = getReference(key, Restructure.WHEN_NECESSARY); return (ref != null ? ref.get() : null); } /** * Return a {@link Reference} to the {@link Entry} for the specified {@code key}, * or {@code null} if not found. * * @param key * the key (can be {@code null}) * @param restructure * types of restructure allowed during this call * @return the reference, or {@code null} if not found */ protected final Reference getReference(Object key,Restructure restructure){ int hash = getHash(key); return getSegmentForHash(hash).getReference(key, hash, restructure); } @Override public V put(K key,V value){ return put(key, value, true); } @Override public V putIfAbsent(K key,V value){ return put(key, value, false); } private V put(final K key,final V value,final boolean overwriteExisting){ return doTask(key, new Task(TaskOption.RESTRUCTURE_BEFORE, TaskOption.RESIZE){ @Override protected V execute(Reference ref,Entry entry,Entries entries){ if (entry != null){ V oldValue = entry.getValue(); if (overwriteExisting){ entry.setValue(value); } return oldValue; } entries.add(value); return null; } }); } @Override public V remove(Object key){ return doTask(key, new Task(TaskOption.RESTRUCTURE_AFTER, TaskOption.SKIP_IF_EMPTY){ @Override protected V execute(Reference ref,Entry entry){ if (entry != 
null){ ref.release(); return entry.value; } return null; } }); } @Override public boolean remove(Object key,final Object value){ return doTask(key, new Task(TaskOption.RESTRUCTURE_AFTER, TaskOption.SKIP_IF_EMPTY){ @Override protected Boolean execute(Reference ref,Entry entry){ if (entry != null && ObjectUtils.nullSafeEquals(entry.getValue(), value)){ ref.release(); return true; } return false; } }); } @Override public boolean replace(K key,final V oldValue,final V newValue){ return doTask(key, new Task(TaskOption.RESTRUCTURE_BEFORE, TaskOption.SKIP_IF_EMPTY){ @Override protected Boolean execute(Reference ref,Entry entry){ if (entry != null && ObjectUtils.nullSafeEquals(entry.getValue(), oldValue)){ entry.setValue(newValue); return true; } return false; } }); } @Override public V replace(K key,final V value){ return doTask(key, new Task(TaskOption.RESTRUCTURE_BEFORE, TaskOption.SKIP_IF_EMPTY){ @Override protected V execute(Reference ref,Entry entry){ if (entry != null){ V oldValue = entry.getValue(); entry.setValue(value); return oldValue; } return null; } }); } @Override public void clear(){ for (Segment segment : this.segments){ segment.clear(); } } /** * Remove any entries that have been garbage collected and are no longer referenced. * Under normal circumstances garbage collected entries are automatically purged as * items are added or removed from the Map. This method can be used to force a purge, * and is useful when the Map is read frequently but updated less often. 
*/ public void purgeUnreferencedEntries(){ for (Segment segment : this.segments){ segment.restructureIfNecessary(false); } } @Override public int size(){ int size = 0; for (Segment segment : this.segments){ size += segment.getCount(); } return size; } @Override public boolean isEmpty(){ for (Segment segment : this.segments){ if (segment.getCount() > 0){ return false; } } return true; } @Override public Set> entrySet(){ Set> entrySet = this.entrySet; if (entrySet == null){ entrySet = new EntrySet(); this.entrySet = entrySet; } return entrySet; } private T doTask(Object key,Task task){ int hash = getHash(key); return getSegmentForHash(hash).doTask(hash, key, task); } private Segment getSegmentForHash(int hash){ return this.segments[(hash >>> (32 - this.shift)) & (this.segments.length - 1)]; } /** * Calculate a shift value that can be used to create a power-of-two value between * the specified maximum and minimum values. * * @param minimumValue * the minimum value * @param maximumValue * the maximum value * @return the calculated shift (use {@code 1 << shift} to obtain a value) */ protected static int calculateShift(int minimumValue,int maximumValue){ int shift = 0; int value = 1; while (value < minimumValue && value < maximumValue){ value <<= 1; shift++; } return shift; } /** * Various reference types supported by this map. */ public enum ReferenceType{ /** Use {@link SoftReference}s */ SOFT, /** Use {@link WeakReference}s */ WEAK } /** * A single segment used to divide the map to allow better concurrent performance. */ @SuppressWarnings("serial") protected final class Segment extends ReentrantLock{ /** * */ private static final long serialVersionUID = 4335858192660193435L; private final ReferenceManager referenceManager; private final int initialSize; /** * Array of references indexed using the low order bits from the hash. * This property should only be set along with {@code resizeThreshold}. 
*/ private volatile Reference[] references; /** * The total number of references contained in this segment. This includes chained * references and references that have been garbage collected but not purged. */ private volatile int count = 0; /** * The threshold when resizing of the references should occur. When {@code count} * exceeds this value references will be resized. */ private int resizeThreshold; public Segment(int initialCapacity){ this.referenceManager = createReferenceManager(); this.initialSize = 1 << calculateShift(initialCapacity, MAXIMUM_SEGMENT_SIZE); setReferences(createReferenceArray(this.initialSize)); } public Reference getReference(Object key,int hash,Restructure restructure){ if (restructure == Restructure.WHEN_NECESSARY){ restructureIfNecessary(false); } if (this.count == 0){ return null; } // Use a local copy to protect against other threads writing Reference[] references = this.references; int index = getIndex(hash, references); Reference head = references[index]; return findInChain(head, key, hash); } /** * Apply an update operation to this segment. * The segment will be locked during the update. * * @param hash * the hash of the key * @param key * the key * @param task * the update operation * @return the result of the operation */ public T doTask(final int hash,final Object key,final Task task){ boolean resize = task.hasOption(TaskOption.RESIZE); if (task.hasOption(TaskOption.RESTRUCTURE_BEFORE)){ restructureIfNecessary(resize); } if (task.hasOption(TaskOption.SKIP_IF_EMPTY) && this.count == 0){ return task.execute(null, null, null); } lock(); try{ final int index = getIndex(hash, this.references); final Reference head = this.references[index]; Reference ref = findInChain(head, key, hash); Entry entry = (ref != null ? 
ref.get() : null); Entries entries = new Entries(){ @Override public void add(V value){ @SuppressWarnings("unchecked") Entry newEntry = new Entry<>((K) key, value); Reference newReference = Segment.this.referenceManager.createReference(newEntry, hash, head); Segment.this.references[index] = newReference; Segment.this.count++; } }; return task.execute(ref, entry, entries); }finally{ unlock(); if (task.hasOption(TaskOption.RESTRUCTURE_AFTER)){ restructureIfNecessary(resize); } } } /** * Clear all items from this segment. */ public void clear(){ if (this.count == 0){ return; } lock(); try{ setReferences(createReferenceArray(this.initialSize)); this.count = 0; }finally{ unlock(); } } /** * Restructure the underlying data structure when it becomes necessary. This * method can increase the size of the references table as well as purge any * references that have been garbage collected. * * @param allowResize * if resizing is permitted */ protected final void restructureIfNecessary(boolean allowResize){ boolean needsResize = (this.count > 0 && this.count >= this.resizeThreshold); Reference ref = this.referenceManager.pollForPurge(); if (ref != null || (needsResize && allowResize)){ lock(); try{ int countAfterRestructure = this.count; Set> toPurge = Collections.emptySet(); if (ref != null){ toPurge = new HashSet<>(); while (ref != null){ toPurge.add(ref); ref = this.referenceManager.pollForPurge(); } } countAfterRestructure -= toPurge.size(); // Recalculate taking into account count inside lock and items that // will be purged needsResize = (countAfterRestructure > 0 && countAfterRestructure >= this.resizeThreshold); boolean resizing = false; int restructureSize = this.references.length; if (allowResize && needsResize && restructureSize < MAXIMUM_SEGMENT_SIZE){ restructureSize <<= 1; resizing = true; } // Either create a new table or reuse the existing one Reference[] restructured = (resizing ? 
createReferenceArray(restructureSize) : this.references); // Restructure for (int i = 0; i < this.references.length; i++){ ref = this.references[i]; if (!resizing){ restructured[i] = null; } while (ref != null){ if (!toPurge.contains(ref) && (ref.get() != null)){ int index = getIndex(ref.getHash(), restructured); restructured[index] = this.referenceManager.createReference(ref.get(), ref.getHash(), restructured[index]); } ref = ref.getNext(); } } // Replace volatile members if (resizing){ setReferences(restructured); } this.count = Math.max(countAfterRestructure, 0); }finally{ unlock(); } } } private Reference findInChain(Reference ref,Object key,int hash){ Reference currRef = ref; while (currRef != null){ if (currRef.getHash() == hash){ Entry entry = currRef.get(); if (entry != null){ K entryKey = entry.getKey(); if (ObjectUtils.nullSafeEquals(entryKey, key)){ return currRef; } } } currRef = currRef.getNext(); } return null; } @SuppressWarnings({ "unchecked" }) private Reference[] createReferenceArray(int size){ return new Reference[size]; } private int getIndex(int hash,Reference[] references){ return (hash & (references.length - 1)); } /** * Replace the references with a new value, recalculating the resizeThreshold. * * @param references * the new references */ private void setReferences(Reference[] references){ this.references = references; this.resizeThreshold = (int) (references.length * getLoadFactor()); } /** * Return the size of the current references array. */ public final int getSize(){ return this.references.length; } /** * Return the total number of references in this segment. */ public final int getCount(){ return this.count; } } /** * A reference to an {@link Entry} contained in the map. Implementations are usually * wrappers around specific Java reference implementations (e.g., {@link SoftReference}). */ protected interface Reference { /** * Return the referenced entry, or {@code null} if the entry is no longer available. 
*/ Entry get(); /** * Return the hash for the reference. */ int getHash(); /** * Return the next reference in the chain, or {@code null} if none. */ Reference getNext(); /** * Release this entry and ensure that it will be returned from * {@code ReferenceManager#pollForPurge()}. */ void release(); } /** * A single map entry. */ protected static final class Entry implements Map.Entry{ private final K key; private volatile V value; public Entry(K key, V value){ this.key = key; this.value = value; } @Override public K getKey(){ return this.key; } @Override public V getValue(){ return this.value; } @Override public V setValue(V value){ V previous = this.value; this.value = value; return previous; } @Override public String toString(){ return (this.key + "=" + this.value); } @Override @SuppressWarnings("rawtypes") public final boolean equals(Object other){ if (this == other){ return true; } if (!(other instanceof Map.Entry)){ return false; } Map.Entry otherEntry = (Map.Entry) other; return (ObjectUtils.nullSafeEquals(getKey(), otherEntry.getKey()) && ObjectUtils.nullSafeEquals(getValue(), otherEntry.getValue())); } @Override public final int hashCode(){ return (ObjectUtils.nullSafeHashCode(this.key) ^ ObjectUtils.nullSafeHashCode(this.value)); } } /** * A task that can be {@link Segment#doTask run} against a {@link Segment}. */ private abstract class Task { private final EnumSet options; public Task(TaskOption...options){ this.options = (options.length == 0 ? EnumSet.noneOf(TaskOption.class) : EnumSet.of(options[0], options)); } public boolean hasOption(TaskOption option){ return this.options.contains(option); } /** * Execute the task. 
* * @param ref * the found reference (or {@code null}) * @param entry * the found entry (or {@code null}) * @param entries * access to the underlying entries * @return the result of the task * @see #execute(Reference, Entry) */ protected T execute(Reference ref,Entry entry,Entries entries){ return execute(ref, entry); } /** * Convenience method that can be used for tasks that do not need access to {@link Entries}. * * @param ref * the found reference (or {@code null}) * @param entry * the found entry (or {@code null}) * @return the result of the task * @see #execute(Reference, Entry, Entries) */ protected T execute(@SuppressWarnings("unused") Reference ref,@SuppressWarnings("unused") Entry entry){ return null; } } /** * Various options supported by a {@code Task}. */ private enum TaskOption{ RESTRUCTURE_BEFORE, RESTRUCTURE_AFTER, SKIP_IF_EMPTY, RESIZE } /** * Allows a task access to {@link Segment} entries. */ private abstract class Entries{ /** * Add a new entry with the specified value. * * @param value * the value to add */ public abstract void add(V value); } /** * Internal entry-set implementation. */ private class EntrySet extends AbstractSet>{ @Override public Iterator> iterator(){ return new EntryIterator(); } @Override public boolean contains(Object o){ if (o instanceof Map.Entry){ Map.Entry entry = (Map.Entry) o; Reference ref = ConcurrentReferenceHashMap.this.getReference(entry.getKey(), Restructure.NEVER); Entry otherEntry = (ref != null ? 
ref.get() : null); if (otherEntry != null){ return ObjectUtils.nullSafeEquals(otherEntry.getValue(), otherEntry.getValue()); } } return false; } @Override public boolean remove(Object o){ if (o instanceof Map.Entry){ Map.Entry entry = (Map.Entry) o; return ConcurrentReferenceHashMap.this.remove(entry.getKey(), entry.getValue()); } return false; } @Override public int size(){ return ConcurrentReferenceHashMap.this.size(); } @Override public void clear(){ ConcurrentReferenceHashMap.this.clear(); } } /** * Internal entry iterator implementation. */ private class EntryIterator implements Iterator>{ private int segmentIndex; private int referenceIndex; private Reference[] references; private Reference reference; private Entry next; private Entry last; public EntryIterator(){ moveToNextSegment(); } @Override public boolean hasNext(){ getNextIfNecessary(); return (this.next != null); } @Override public Entry next(){ getNextIfNecessary(); if (this.next == null){ throw new NoSuchElementException(); } this.last = this.next; this.next = null; return this.last; } private void getNextIfNecessary(){ while (this.next == null){ moveToNextReference(); if (this.reference == null){ return; } this.next = this.reference.get(); } } private void moveToNextReference(){ if (this.reference != null){ this.reference = this.reference.getNext(); } while (this.reference == null && this.references != null){ if (this.referenceIndex >= this.references.length){ moveToNextSegment(); this.referenceIndex = 0; }else{ this.reference = this.references[this.referenceIndex]; this.referenceIndex++; } } } private void moveToNextSegment(){ this.reference = null; this.references = null; if (this.segmentIndex < ConcurrentReferenceHashMap.this.segments.length){ this.references = ConcurrentReferenceHashMap.this.segments[this.segmentIndex].references; this.segmentIndex++; } } @Override public void remove(){ Assert.state(this.last != null, "No element to remove"); 
ConcurrentReferenceHashMap.this.remove(this.last.getKey()); } } /** * The types of restructuring that can be performed. */ protected enum Restructure{ WHEN_NECESSARY, NEVER } /** * Strategy class used to manage {@link Reference}s. This class can be overridden if * alternative reference types need to be supported. */ protected class ReferenceManager{ private final ReferenceQueue> queue = new ReferenceQueue<>(); /** * Factory method used to create a new {@link Reference}. * * @param entry * the entry contained in the reference * @param hash * the hash * @param next * the next reference in the chain, or {@code null} if none * @return a new {@link Reference} */ public Reference createReference(Entry entry,int hash,Reference next){ if (ConcurrentReferenceHashMap.this.referenceType == ReferenceType.WEAK){ return new WeakEntryReference<>(entry, hash, next, this.queue); } return new SoftEntryReference<>(entry, hash, next, this.queue); } /** * Return any reference that has been garbage collected and can be purged from the * underlying structure or {@code null} if no references need purging. This * method must be thread safe and ideally should not block when returning * {@code null}. References should be returned once and only once. * * @return a reference to purge or {@code null} */ @SuppressWarnings("unchecked") public Reference pollForPurge(){ return (Reference) this.queue.poll(); } } /** * Internal {@link Reference} implementation for {@link SoftReference}s. 
*/ private static final class SoftEntryReference extends SoftReference> implements Reference{ private final int hash; private final Reference nextReference; public SoftEntryReference(Entry entry, int hash, Reference next, ReferenceQueue> queue){ super(entry, queue); this.hash = hash; this.nextReference = next; } @Override public int getHash(){ return this.hash; } @Override public Reference getNext(){ return this.nextReference; } @Override public void release(){ enqueue(); clear(); } } /** * Internal {@link Reference} implementation for {@link WeakReference}s. */ private static final class WeakEntryReference extends WeakReference> implements Reference{ private final int hash; private final Reference nextReference; public WeakEntryReference(Entry entry, int hash, Reference next, ReferenceQueue> queue){ super(entry, queue); this.hash = hash; this.nextReference = next; } @Override public int getHash(){ return this.hash; } @Override public Reference getNext(){ return this.nextReference; } @Override public void release(){ enqueue(); clear(); } } }





© 2015 - 2024 Weber Informatics LLC | Privacy Policy