/*
* Copyright (C) 2009 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.testifyproject.google.common.collect;
import static org.testifyproject.google.common.base.Preconditions.checkNotNull;
import static org.testifyproject.google.common.collect.CollectPreconditions.checkRemove;
import org.testifyproject.google.common.annotations.GwtIncompatible;
import org.testifyproject.google.common.annotations.VisibleForTesting;
import org.testifyproject.google.common.base.Equivalence;
import org.testifyproject.google.common.primitives.Ints;
import org.testifyproject.google.errorprone.annotations.CanIgnoreReturnValue;
import org.testifyproject.google.j2objc.annotations.Weak;
import org.testifyproject.google.j2objc.annotations.WeakOuter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
import java.util.AbstractCollection;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReferenceArray;
import java.util.concurrent.locks.ReentrantLock;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
/**
* The concurrent hash map implementation built by {@link MapMaker}.
*
* <p>This implementation is heavily derived from revision 1.96 of ConcurrentHashMap.java.
*
* @param <K> the type of the keys in the map
* @param <V> the type of the values in the map
* @param <E> the type of the {@link InternalEntry} entry implementation used internally
* @param <S> the type of the {@link Segment} entry implementation used internally
* @author Bob Lee
* @author Charles Fry
* @author Doug Lea ({@code ConcurrentHashMap})
*/
// TODO(kak/cpovirk): Consider removing @CanIgnoreReturnValue from this class.
@GwtIncompatible
class MapMakerInternalMap<
K,
V,
E extends MapMakerInternalMap.InternalEntry<K, V, E>,
S extends MapMakerInternalMap.Segment<K, V, E, S>>
extends AbstractMap<K, V> implements ConcurrentMap<K, V>, Serializable {
/*
* The basic strategy is to subdivide the table among Segments, each of which itself is a
* concurrently readable hash table. The map supports non-blocking reads and concurrent writes
* across different segments.
*
* The page replacement algorithm's data structures are kept casually consistent with the map. The
* ordering of writes to a segment is sequentially consistent. An update to the map and recording
* of reads may not be immediately reflected on the algorithm's data structures. These structures
* are guarded by a lock and operations are applied in batches to avoid lock contention. The
* penalty of applying the batches is spread across threads so that the amortized cost is slightly
* higher than performing just the operation without enforcing the capacity constraint.
*
* This implementation uses a per-segment queue to record a memento of the additions, removals,
* and accesses that were performed on the map. The queue is drained on writes and when it exceeds
* its capacity threshold.
*
* The Least Recently Used page replacement algorithm was chosen due to its simplicity, high hit
* rate, and ability to be implemented with O(1) time complexity. The initial LRU implementation
* operates per-segment rather than globally for increased implementation simplicity. We expect
* the cache hit rate to be similar to that of a global LRU algorithm.
*/
// Constants
/**
* The maximum capacity, used if a higher value is implicitly specified by either of the
* constructors with arguments. MUST be a power of two <= 1<<30 to ensure that entries are
* indexable using ints.
*/
static final int MAXIMUM_CAPACITY = Ints.MAX_POWER_OF_TWO;
/** The maximum number of segments to allow; used to bound constructor arguments. */
static final int MAX_SEGMENTS = 1 << 16; // slightly conservative
/** Number of (unsynchronized) retries in the containsValue method. */
static final int CONTAINS_VALUE_RETRIES = 3;
/**
* Number of cache access operations that can be buffered per segment before the cache's recency
* ordering information is updated. This is used to avoid lock contention by recording a memento
* of reads and delaying a lock acquisition until the threshold is crossed or a mutation occurs.
*
* This must be a (2^n)-1 as it is used as a mask.
*/
static final int DRAIN_THRESHOLD = 0x3F;
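// Editorial note (not in the original source): DRAIN_THRESHOLD is applied as a bit mask against a
// per-segment read counter, so cleanup work is attempted roughly once every 64 reads, e.g.:
//   if ((readCount.incrementAndGet() & DRAIN_THRESHOLD) == 0) { /* drain reference queues */ }
// The actual call site is the segment's post-read cleanup hook.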
/**
* Maximum number of entries to be drained in a single cleanup run. This applies independently to
* the cleanup queue and both reference queues.
*/
// TODO(fry): empirically optimize this
static final int DRAIN_MAX = 16;
static final long CLEANUP_EXECUTOR_DELAY_SECS = 60;
// Fields
/**
* Mask value for indexing into segments. The upper bits of a key's hash code are used to choose
* the segment.
*/
final transient int segmentMask;
/**
* Shift value for indexing within segments. Helps prevent entries that end up in the same segment
* from also ending up in the same bucket.
*/
final transient int segmentShift;
/** The segments, each of which is a specialized hash table. */
final transient Segment<K, V, E, S>[] segments;
/** The concurrency level. */
final int concurrencyLevel;
/** Strategy for comparing keys. */
final Equivalence<Object> keyEquivalence;
/** Strategy for handling entries and segments in a type-safe and efficient manner. */
final transient InternalEntryHelper<K, V, E, S> entryHelper;
/**
* Creates a new, empty map with the specified strategy, initial capacity and concurrency level.
*/
private MapMakerInternalMap(MapMaker builder, InternalEntryHelper<K, V, E, S> entryHelper) {
concurrencyLevel = Math.min(builder.getConcurrencyLevel(), MAX_SEGMENTS);
keyEquivalence = builder.getKeyEquivalence();
this.entryHelper = entryHelper;
int initialCapacity = Math.min(builder.getInitialCapacity(), MAXIMUM_CAPACITY);
// Find power-of-two sizes best matching arguments. Constraints:
// (segmentCount > concurrencyLevel)
int segmentShift = 0;
int segmentCount = 1;
while (segmentCount < concurrencyLevel) {
++segmentShift;
segmentCount <<= 1;
}
this.segmentShift = 32 - segmentShift;
segmentMask = segmentCount - 1;
this.segments = newSegmentArray(segmentCount);
int segmentCapacity = initialCapacity / segmentCount;
if (segmentCapacity * segmentCount < initialCapacity) {
++segmentCapacity;
}
int segmentSize = 1;
while (segmentSize < segmentCapacity) {
segmentSize <<= 1;
}
for (int i = 0; i < this.segments.length; ++i) {
this.segments[i] = createSegment(segmentSize, MapMaker.UNSET_INT);
}
}
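// Editorial sketch (assumes the public MapMaker builder API; Request/Result are placeholder
// types): callers do not construct this class directly, they obtain it through MapMaker, which
// picks the entry/segment flavor in create() below. For example:
//   ConcurrentMap<Request, Result> cache =
//       new MapMaker().concurrencyLevel(4).weakKeys().makeMap();
// With concurrencyLevel 4 and the default initial capacity of 16, the constructor above produces
// 4 segments (segmentShift = 30, segmentMask = 3), each starting with a table of 4 buckets.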
static <K, V> MapMakerInternalMap<K, V, ? extends InternalEntry<K, V, ?>, ?> create(
MapMaker builder) {
if (builder.getKeyStrength() == Strength.STRONG
&& builder.getValueStrength() == Strength.STRONG) {
return new MapMakerInternalMap<
K, V, StrongKeyStrongValueEntry<K, V>, StrongKeyStrongValueSegment<K, V>>(
builder, StrongKeyStrongValueEntry.Helper.<K, V>instance());
}
if (builder.getKeyStrength() == Strength.STRONG
&& builder.getValueStrength() == Strength.WEAK) {
return new MapMakerInternalMap<
K, V, StrongKeyWeakValueEntry<K, V>, StrongKeyWeakValueSegment<K, V>>(
builder, StrongKeyWeakValueEntry.Helper.<K, V>instance());
}
if (builder.getKeyStrength() == Strength.WEAK
&& builder.getValueStrength() == Strength.STRONG) {
return new MapMakerInternalMap<
K, V, WeakKeyStrongValueEntry<K, V>, WeakKeyStrongValueSegment<K, V>>(
builder, WeakKeyStrongValueEntry.Helper.<K, V>instance());
}
if (builder.getKeyStrength() == Strength.WEAK && builder.getValueStrength() == Strength.WEAK) {
return new MapMakerInternalMap<
K, V, WeakKeyWeakValueEntry<K, V>, WeakKeyWeakValueSegment<K, V>>(
builder, WeakKeyWeakValueEntry.Helper.<K, V>instance());
}
throw new AssertionError();
}
enum Strength {
STRONG {
@Override
Equivalence<Object> defaultEquivalence() {
return Equivalence.equals();
}
},
WEAK {
@Override
Equivalence<Object> defaultEquivalence() {
return Equivalence.identity();
}
};
/**
* Returns the default equivalence strategy used to compare and hash keys or values referenced
* at this strength. This strategy will be used unless the user explicitly specifies an
* alternate strategy.
*/
abstract Equivalence<Object> defaultEquivalence();
}
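// Editorial note: the practical consequence of the WEAK default above is that a map built with,
// say, new MapMaker().weakKeys().makeMap() compares keys by identity (==), so a lookup only hits
// if it uses the exact same key instance that was inserted:
//   map.put(new String("k"), v);
//   map.get(new String("k"));  // null: an equal but not identical key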
/**
* A helper object for operating on {@link InternalEntry} instances in a type-safe and efficient
* manner.
*
* For each of the four combinations of strong/weak key and strong/weak value, there are
* corresponding {@link InternalEntry}, {@link Segment}, and {@link InternalEntryHelper}
* implementations.
*
* @param <K> the type of the key in each entry
* @param <V> the type of the value in each entry
* @param <E> the type of the {@link InternalEntry} entry implementation
* @param <S> the type of the {@link Segment} entry implementation
*/
interface InternalEntryHelper<
K, V, E extends InternalEntry<K, V, E>, S extends Segment<K, V, E, S>> {
/** The strength of the key type in each entry. */
Strength keyStrength();
/** The strength of the value type in each entry. */
Strength valueStrength();
/** Returns a freshly created segment, typed at the {@code S} type. */
S newSegment(MapMakerInternalMap<K, V, E, S> map, int initialCapacity, int maxSegmentSize);
/**
* Returns a freshly created entry, typed at the {@code E} type, for the given {@code segment}.
*/
E newEntry(S segment, K key, int hash, @Nullable E next);
/**
* Returns a freshly created entry, typed at the {@code E} type, for the given {@code segment},
* that is a copy of the given {@code entry}.
*/
E copy(S segment, E entry, @Nullable E newNext);
/**
* Sets the value of the given {@code entry} in the given {@code segment} to be the given {@code
* value}
*/
void setValue(S segment, E entry, V value);
}
/**
* An entry in a hash table of a {@link Segment}.
*
* <p>Entries in the map can be in the following states:
*
* <ul>
* <li>Valid: Live - valid key/value are set
* <li>Invalid: Collected - key/value was partially collected, but not yet cleaned up
* </ul>
*/
interface InternalEntry<K, V, E extends InternalEntry<K, V, E>> {
/** Gets the next entry in the chain. */
E getNext();
/**
* Gets the entry's hash.
*/
int getHash();
/**
* Gets the key for this entry.
*/
K getKey();
/** Gets the value for the entry. */
V getValue();
}
/*
* Note: the following classes have a lot of duplicate code. It sucks, but it saves a lot of
* memory. If only Java had mixins!
*/
/** Base class for {@link InternalEntry} implementations for strong keys. */
abstract static class AbstractStrongKeyEntry<K, V, E extends InternalEntry<K, V, E>>
implements InternalEntry<K, V, E> {
final K key;
final int hash;
final E next;
AbstractStrongKeyEntry(K key, int hash, @Nullable E next) {
this.key = key;
this.hash = hash;
this.next = next;
}
@Override
public K getKey() {
return this.key;
}
@Override
public int getHash() {
return hash;
}
@Override
public E getNext() {
return next;
}
}
/** Marker interface for {@link InternalEntry} implementations for strong values. */
interface StrongValueEntry<K, V, E extends InternalEntry<K, V, E>>
extends InternalEntry<K, V, E> {}
/** Marker interface for {@link InternalEntry} implementations for weak values. */
interface WeakValueEntry<K, V, E extends InternalEntry<K, V, E>> extends InternalEntry<K, V, E> {
/** Gets the weak value reference held by entry. */
WeakValueReference<K, V, E> getValueReference();
/**
* Clears the weak value reference held by the entry. Should be used when the entry's value is
* overwritten.
*/
void clearValue();
}
@SuppressWarnings("unchecked") // impl never uses a parameter or returns any non-null value
static <K, V, E extends InternalEntry<K, V, E>>
WeakValueReference<K, V, E> unsetWeakValueReference() {
return (WeakValueReference<K, V, E>) UNSET_WEAK_VALUE_REFERENCE;
}
/** Concrete implementation of {@link InternalEntry} for strong keys and strong values. */
static final class StrongKeyStrongValueEntry<K, V>
extends AbstractStrongKeyEntry<K, V, StrongKeyStrongValueEntry<K, V>>
implements StrongValueEntry<K, V, StrongKeyStrongValueEntry<K, V>> {
@Nullable private volatile V value = null;
StrongKeyStrongValueEntry(K key, int hash, @Nullable StrongKeyStrongValueEntry<K, V> next) {
super(key, hash, next);
}
@Override
@Nullable
public V getValue() {
return value;
}
void setValue(V value) {
this.value = value;
}
StrongKeyStrongValueEntry<K, V> copy(StrongKeyStrongValueEntry<K, V> newNext) {
StrongKeyStrongValueEntry<K, V> newEntry =
new StrongKeyStrongValueEntry<K, V>(this.key, this.hash, newNext);
newEntry.value = this.value;
return newEntry;
}
/** Concrete implementation of {@link InternalEntryHelper} for strong keys and strong values. */
static final class Helper<K, V>
implements InternalEntryHelper<
K, V, StrongKeyStrongValueEntry<K, V>, StrongKeyStrongValueSegment<K, V>> {
private static final Helper<?, ?> INSTANCE = new Helper<>();
@SuppressWarnings("unchecked")
static <K, V> Helper<K, V> instance() {
return (Helper<K, V>) INSTANCE;
}
@Override
public Strength keyStrength() {
return Strength.STRONG;
}
@Override
public Strength valueStrength() {
return Strength.STRONG;
}
@Override
public StrongKeyStrongValueSegment newSegment(
MapMakerInternalMap<
K, V, StrongKeyStrongValueEntry, StrongKeyStrongValueSegment>
map,
int initialCapacity,
int maxSegmentSize) {
return new StrongKeyStrongValueSegment(map, initialCapacity, maxSegmentSize);
}
@Override
public StrongKeyStrongValueEntry copy(
StrongKeyStrongValueSegment segment,
StrongKeyStrongValueEntry entry,
@Nullable StrongKeyStrongValueEntry newNext) {
return entry.copy(newNext);
}
@Override
public void setValue(
StrongKeyStrongValueSegment segment,
StrongKeyStrongValueEntry entry,
V value) {
entry.setValue(value);
}
@Override
public StrongKeyStrongValueEntry newEntry(
StrongKeyStrongValueSegment segment,
K key,
int hash,
@Nullable StrongKeyStrongValueEntry next) {
return new StrongKeyStrongValueEntry(key, hash, next);
}
}
}
/** Concrete implementation of {@link InternalEntry} for strong keys and weak values. */
static final class StrongKeyWeakValueEntry<K, V>
extends AbstractStrongKeyEntry<K, V, StrongKeyWeakValueEntry<K, V>>
implements WeakValueEntry<K, V, StrongKeyWeakValueEntry<K, V>> {
private volatile WeakValueReference<K, V, StrongKeyWeakValueEntry<K, V>> valueReference =
unsetWeakValueReference();
StrongKeyWeakValueEntry(K key, int hash, @Nullable StrongKeyWeakValueEntry<K, V> next) {
super(key, hash, next);
}
@Override
public V getValue() {
return valueReference.get();
}
@Override
public void clearValue() {
valueReference.clear();
}
void setValue(V value, ReferenceQueue queueForValues) {
WeakValueReference> previous = this.valueReference;
this.valueReference =
new WeakValueReferenceImpl>(
queueForValues, value, this);
previous.clear();
}
StrongKeyWeakValueEntry copy(
ReferenceQueue queueForValues, StrongKeyWeakValueEntry newNext) {
StrongKeyWeakValueEntry newEntry =
new StrongKeyWeakValueEntry(key, hash, newNext);
newEntry.valueReference = valueReference.copyFor(queueForValues, newEntry);
return newEntry;
}
@Override
public WeakValueReference> getValueReference() {
return valueReference;
}
/** Concrete implementation of {@link InternalEntryHelper} for strong keys and weak values. */
static final class Helper<K, V>
implements InternalEntryHelper<
K, V, StrongKeyWeakValueEntry<K, V>, StrongKeyWeakValueSegment<K, V>> {
private static final Helper<?, ?> INSTANCE = new Helper<>();
@SuppressWarnings("unchecked")
static <K, V> Helper<K, V> instance() {
return (Helper<K, V>) INSTANCE;
}
@Override
public Strength keyStrength() {
return Strength.STRONG;
}
@Override
public Strength valueStrength() {
return Strength.WEAK;
}
@Override
public StrongKeyWeakValueSegment newSegment(
MapMakerInternalMap, StrongKeyWeakValueSegment>
map,
int initialCapacity,
int maxSegmentSize) {
return new StrongKeyWeakValueSegment(map, initialCapacity, maxSegmentSize);
}
@Override
public StrongKeyWeakValueEntry copy(
StrongKeyWeakValueSegment segment,
StrongKeyWeakValueEntry entry,
@Nullable StrongKeyWeakValueEntry newNext) {
if (Segment.isCollected(entry)) {
return null;
}
return entry.copy(segment.queueForValues, newNext);
}
@Override
public void setValue(
StrongKeyWeakValueSegment segment, StrongKeyWeakValueEntry entry, V value) {
entry.setValue(value, segment.queueForValues);
}
@Override
public StrongKeyWeakValueEntry newEntry(
StrongKeyWeakValueSegment segment,
K key,
int hash,
@Nullable StrongKeyWeakValueEntry next) {
return new StrongKeyWeakValueEntry(key, hash, next);
}
}
}
/** Base class for {@link InternalEntry} implementations for weak keys. */
abstract static class AbstractWeakKeyEntry<K, V, E extends InternalEntry<K, V, E>>
extends WeakReference<K> implements InternalEntry<K, V, E> {
final int hash;
final E next;
AbstractWeakKeyEntry(ReferenceQueue<K> queue, K key, int hash, @Nullable E next) {
super(key, queue);
this.hash = hash;
this.next = next;
}
@Override
public K getKey() {
return get();
}
@Override
public int getHash() {
return hash;
}
@Override
public E getNext() {
return next;
}
}
/** Concrete implementation of {@link InternalEntry} for weak keys and strong values. */
static final class WeakKeyStrongValueEntry<K, V>
extends AbstractWeakKeyEntry<K, V, WeakKeyStrongValueEntry<K, V>>
implements StrongValueEntry<K, V, WeakKeyStrongValueEntry<K, V>> {
@Nullable private volatile V value = null;
WeakKeyStrongValueEntry(
ReferenceQueue<K> queue, K key, int hash, @Nullable WeakKeyStrongValueEntry<K, V> next) {
super(queue, key, hash, next);
}
@Override
@Nullable
public V getValue() {
return value;
}
void setValue(V value) {
this.value = value;
}
WeakKeyStrongValueEntry copy(
ReferenceQueue queueForKeys, WeakKeyStrongValueEntry newNext) {
WeakKeyStrongValueEntry newEntry =
new WeakKeyStrongValueEntry(queueForKeys, getKey(), this.hash, newNext);
newEntry.setValue(value);
return newEntry;
}
/** Concrete implementation of {@link InternalEntryHelper} for weak keys and strong values. */
static final class Helper<K, V>
implements InternalEntryHelper<
K, V, WeakKeyStrongValueEntry<K, V>, WeakKeyStrongValueSegment<K, V>> {
private static final Helper<?, ?> INSTANCE = new Helper<>();
@SuppressWarnings("unchecked")
static <K, V> Helper<K, V> instance() {
return (Helper<K, V>) INSTANCE;
}
@Override
public Strength keyStrength() {
return Strength.WEAK;
}
@Override
public Strength valueStrength() {
return Strength.STRONG;
}
@Override
public WeakKeyStrongValueSegment newSegment(
MapMakerInternalMap, WeakKeyStrongValueSegment>
map,
int initialCapacity,
int maxSegmentSize) {
return new WeakKeyStrongValueSegment(map, initialCapacity, maxSegmentSize);
}
@Override
public WeakKeyStrongValueEntry copy(
WeakKeyStrongValueSegment segment,
WeakKeyStrongValueEntry entry,
@Nullable WeakKeyStrongValueEntry newNext) {
if (entry.getKey() == null) {
// key collected
return null;
}
return entry.copy(segment.queueForKeys, newNext);
}
@Override
public void setValue(
WeakKeyStrongValueSegment segment, WeakKeyStrongValueEntry entry, V value) {
entry.setValue(value);
}
@Override
public WeakKeyStrongValueEntry newEntry(
WeakKeyStrongValueSegment segment,
K key,
int hash,
@Nullable WeakKeyStrongValueEntry next) {
return new WeakKeyStrongValueEntry(segment.queueForKeys, key, hash, next);
}
}
}
/** Concrete implementation of {@link InternalEntry} for weak keys and weak values. */
static final class WeakKeyWeakValueEntry<K, V>
extends AbstractWeakKeyEntry<K, V, WeakKeyWeakValueEntry<K, V>>
implements WeakValueEntry<K, V, WeakKeyWeakValueEntry<K, V>> {
private volatile WeakValueReference<K, V, WeakKeyWeakValueEntry<K, V>> valueReference =
unsetWeakValueReference();
WeakKeyWeakValueEntry(
ReferenceQueue<K> queue, K key, int hash, @Nullable WeakKeyWeakValueEntry<K, V> next) {
super(queue, key, hash, next);
}
@Override
public V getValue() {
return valueReference.get();
}
WeakKeyWeakValueEntry copy(
ReferenceQueue queueForKeys,
ReferenceQueue queueForValues,
WeakKeyWeakValueEntry newNext) {
WeakKeyWeakValueEntry newEntry =
new WeakKeyWeakValueEntry(queueForKeys, getKey(), this.hash, newNext);
newEntry.valueReference = valueReference.copyFor(queueForValues, newEntry);
return newEntry;
}
@Override
public void clearValue() {
valueReference.clear();
}
void setValue(V value, ReferenceQueue queueForValues) {
WeakValueReference> previous = this.valueReference;
this.valueReference =
new WeakValueReferenceImpl>(
queueForValues, value, this);
previous.clear();
}
@Override
public WeakValueReference> getValueReference() {
return valueReference;
}
/** Concrete implementation of {@link InternalEntryHelper} for weak keys and weak values. */
static final class Helper<K, V>
implements InternalEntryHelper<
K, V, WeakKeyWeakValueEntry<K, V>, WeakKeyWeakValueSegment<K, V>> {
private static final Helper<?, ?> INSTANCE = new Helper<>();
@SuppressWarnings("unchecked")
static <K, V> Helper<K, V> instance() {
return (Helper<K, V>) INSTANCE;
}
@Override
public Strength keyStrength() {
return Strength.WEAK;
}
@Override
public Strength valueStrength() {
return Strength.WEAK;
}
@Override
public WeakKeyWeakValueSegment newSegment(
MapMakerInternalMap, WeakKeyWeakValueSegment> map,
int initialCapacity,
int maxSegmentSize) {
return new WeakKeyWeakValueSegment(map, initialCapacity, maxSegmentSize);
}
@Override
public WeakKeyWeakValueEntry copy(
WeakKeyWeakValueSegment segment,
WeakKeyWeakValueEntry entry,
@Nullable WeakKeyWeakValueEntry newNext) {
if (entry.getKey() == null) {
// key collected
return null;
}
if (Segment.isCollected(entry)) {
return null;
}
return entry.copy(segment.queueForKeys, segment.queueForValues, newNext);
}
@Override
public void setValue(
WeakKeyWeakValueSegment segment, WeakKeyWeakValueEntry entry, V value) {
entry.setValue(value, segment.queueForValues);
}
@Override
public WeakKeyWeakValueEntry newEntry(
WeakKeyWeakValueSegment segment,
K key,
int hash,
@Nullable WeakKeyWeakValueEntry next) {
return new WeakKeyWeakValueEntry(segment.queueForKeys, key, hash, next);
}
}
}
/** A weakly referenced value that also has a reference to its containing entry. */
interface WeakValueReference<K, V, E extends InternalEntry<K, V, E>> {
/**
* Returns the current value being referenced, or {@code null} if there is none (e.g. because
* either it got collected, or {@link #clear} was called, or it wasn't set in the first place).
*/
@Nullable
V get();
/** Returns the entry which contains this {@link WeakValueReference}. */
E getEntry();
/** Unsets the referenced value. Subsequent calls to {@link #get} will return {@code null}. */
void clear();
/**
* Returns a freshly created {@link WeakValueReference} for the given {@code entry} (and on the
* given {@code queue}) with the same value as this {@link WeakValueReference}.
*/
WeakValueReference<K, V, E> copyFor(ReferenceQueue<V> queue, E entry);
}
/**
* A dummy implementation of {@link InternalEntry}, solely for use in the type signature of {@link
* #UNSET_WEAK_VALUE_REFERENCE} below.
*/
static final class DummyInternalEntry
implements InternalEntry<Object, Object, DummyInternalEntry> {
private DummyInternalEntry() {
throw new AssertionError();
}
@Override
public DummyInternalEntry getNext() {
throw new AssertionError();
}
@Override
public int getHash() {
throw new AssertionError();
}
@Override
public Object getKey() {
throw new AssertionError();
}
@Override
public Object getValue() {
throw new AssertionError();
}
}
/**
* A singleton {@link WeakValueReference} used to denote an unset value in an entry with weak
* values.
*/
static final WeakValueReference<Object, Object, DummyInternalEntry> UNSET_WEAK_VALUE_REFERENCE =
new WeakValueReference<Object, Object, DummyInternalEntry>() {
@Override
public DummyInternalEntry getEntry() {
return null;
}
@Override
public void clear() {}
@Override
public Object get() {
return null;
}
@Override
public WeakValueReference<Object, Object, DummyInternalEntry> copyFor(
ReferenceQueue<Object> queue, DummyInternalEntry entry) {
return this;
}
};
/** Concrete implementation of {@link WeakValueReference}. */
static final class WeakValueReferenceImpl<K, V, E extends InternalEntry<K, V, E>>
extends WeakReference<V> implements WeakValueReference<K, V, E> {
final E entry;
WeakValueReferenceImpl(ReferenceQueue<V> queue, V referent, E entry) {
super(referent, queue);
this.entry = entry;
}
@Override
public E getEntry() {
return entry;
}
@Override
public WeakValueReference<K, V, E> copyFor(ReferenceQueue<V> queue, E entry) {
return new WeakValueReferenceImpl<K, V, E>(queue, get(), entry);
}
}
/**
* Applies a supplemental hash function to a given hash code, which defends against poor quality
* hash functions. This is critical when the concurrent hash map uses power-of-two length hash
* tables, that otherwise encounter collisions for hash codes that do not differ in lower or
* upper bits.
*
* @param h hash code
*/
static int rehash(int h) {
// Spread bits to regularize both segment and index locations,
// using variant of single-word Wang/Jenkins hash.
// TODO(kevinb): use Hashing/move this to Hashing?
h += (h << 15) ^ 0xffffcd7d;
h ^= (h >>> 10);
h += (h << 3);
h ^= (h >>> 6);
h += (h << 2) + (h << 14);
return h ^ (h >>> 16);
}
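// Editorial note: without this spreading step, hash codes that differ only in bits above the
// table mask would all land in the same bucket within a segment, and codes that differ only in
// the low bits would all land in the same segment; rehash() mixes high and low bits so both the
// segment index (taken from the upper bits) and the bucket index (taken from the lower bits) vary.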
/**
* This method is a convenience for testing. Code should call {@link Segment#copyEntry} directly.
*/
// Guarded By Segment.this
@VisibleForTesting
E copyEntry(E original, E newNext) {
int hash = original.getHash();
return segmentFor(hash).copyEntry(original, newNext);
}
int hash(Object key) {
int h = keyEquivalence.hash(key);
return rehash(h);
}
void reclaimValue(WeakValueReference<K, V, E> valueReference) {
E entry = valueReference.getEntry();
int hash = entry.getHash();
segmentFor(hash).reclaimValue(entry.getKey(), hash, valueReference);
}
void reclaimKey(E entry) {
int hash = entry.getHash();
segmentFor(hash).reclaimKey(entry, hash);
}
/**
* This method is a convenience for testing. Code should call {@link Segment#getLiveValue}
* instead.
*/
@VisibleForTesting
boolean isLiveForTesting(InternalEntry<K, V, ?> entry) {
return segmentFor(entry.getHash()).getLiveValueForTesting(entry) != null;
}
/**
* Returns the segment that should be used for a key with the given hash.
*
* @param hash the hash code for the key
* @return the segment
*/
Segment<K, V, E, S> segmentFor(int hash) {
// TODO(fry): Lazily create segments?
return segments[(hash >>> segmentShift) & segmentMask];
}
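// Editorial example: with concurrencyLevel 4 the constructor sets segmentShift = 30 and
// segmentMask = 3, so segmentFor() reduces to segments[(hash >>> 30) & 3]; the top two bits of
// the rehashed key hash select the segment, leaving the low bits to select the bucket within it.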
Segment<K, V, E, S> createSegment(int initialCapacity, int maxSegmentSize) {
return entryHelper.newSegment(this, initialCapacity, maxSegmentSize);
}
/**
* Gets the value from an entry. Returns {@code null} if the entry is invalid, partially-collected
* or computing.
*/
V getLiveValue(E entry) {
if (entry.getKey() == null) {
return null;
}
V value = entry.getValue();
if (value == null) {
return null;
}
return value;
}
@SuppressWarnings("unchecked")
final Segment<K, V, E, S>[] newSegmentArray(int ssize) {
return new Segment[ssize];
}
// Inner Classes
/**
* Segments are specialized versions of hash tables. This subclass inherits from ReentrantLock
* opportunistically, just to simplify some locking and avoid separate construction.
*/
@SuppressWarnings("serial") // This class is never serialized.
abstract static class Segment<
K, V, E extends InternalEntry<K, V, E>, S extends Segment<K, V, E, S>>
extends ReentrantLock {
/*
* Segments maintain a table of entry lists that are ALWAYS kept in a consistent state, so can
* be read without locking. Next fields of nodes are immutable (final). All list additions are
* performed at the front of each bin. This makes it easy to check changes, and also fast to
* traverse. When nodes would otherwise be changed, new nodes are created to replace them. This
* works well for hash tables since the bin lists tend to be short. (The average length is less
* than two.)
*
* Read operations can thus proceed without locking, but rely on selected uses of volatiles to
* ensure that completed write operations performed by other threads are noticed. For most
* purposes, the "count" field, tracking the number of elements, serves as that volatile
* variable ensuring visibility. This is convenient because this field needs to be read in many
* read operations anyway:
*
* - All (unsynchronized) read operations must first read the "count" field, and should not
* look at table entries if it is 0.
*
* - All (synchronized) write operations should write to the "count" field after structurally
* changing any bin. The operations must not take any action that could even momentarily
* cause a concurrent read operation to see inconsistent data. This is made easier by the
* nature of the read operations in Map. For example, no operation can reveal that the table
* has grown but the threshold has not yet been updated, so there are no atomicity requirements
* for this with respect to reads.
*
* As a guide, all critical volatile reads and writes to the count field are marked in code
* comments.
*/
@Weak final MapMakerInternalMap<K, V, E, S> map;
/**
* The number of live elements in this segment's region. This does not include unset elements
* which are awaiting cleanup.
*/
volatile int count;
/**
* Number of updates that alter the size of the table. This is used during bulk-read methods to
* make sure they see a consistent snapshot: If modCounts change during a traversal of segments
* computing size or checking containsValue, then we might have an inconsistent view of state
* so (usually) must retry.
*/
int modCount;
/**
* The table is expanded when its size exceeds this threshold. (The value of this field is
* always {@code (int) (capacity * 0.75)}.)
*/
int threshold;
/** The per-segment table. */
volatile AtomicReferenceArray<E> table;
/**
* The maximum size of this map. MapMaker.UNSET_INT if there is no maximum.
*/
final int maxSegmentSize;
/**
* A counter of the number of reads since the last write, used to drain queues on a small
* fraction of read operations.
*/
final AtomicInteger readCount = new AtomicInteger();
Segment(MapMakerInternalMap<K, V, E, S> map, int initialCapacity, int maxSegmentSize) {
this.map = map;
this.maxSegmentSize = maxSegmentSize;
initTable(newEntryArray(initialCapacity));
}
/**
* Returns {@code this} up-casted to the specific {@link Segment} implementation type {@code S}.
*
* This method exists so that the {@link Segment} code can be generic in terms of {@code S},
* the type of the concrete implementation.
*/
abstract S self();
/** Drains the reference queues used by this segment, if any. */
@GuardedBy("this")
void maybeDrainReferenceQueues() {}
/** Clears the reference queues used by this segment, if any. */
void maybeClearReferenceQueues() {}
/** Sets the value of the given {@code entry}. */
void setValue(E entry, V value) {
this.map.entryHelper.setValue(self(), entry, value);
}
/** Returns a copy of the given {@code entry}. */
E copyEntry(E original, E newNext) {
return this.map.entryHelper.copy(self(), original, newNext);
}
AtomicReferenceArray<E> newEntryArray(int size) {
return new AtomicReferenceArray<E>(size);
}
void initTable(AtomicReferenceArray<E> newTable) {
this.threshold = newTable.length() * 3 / 4; // 0.75
if (this.threshold == maxSegmentSize) {
// prevent spurious expansion before eviction
this.threshold++;
}
this.table = newTable;
}
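// Editorial example: for a segment table of 4 buckets, initTable() sets the threshold to 3, so
// the fourth insertion into that segment triggers expand() (unless the threshold collided with
// maxSegmentSize and was bumped above).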
// Convenience methods for testing
/**
* Unsafe cast of the given entry to {@code E}, the type of the specific {@link InternalEntry}
* implementation type.
*
* This method is provided as a convenience for tests. Otherwise they'd need to be
* knowledgeable about all the implementation details of our type system trickery.
*/
abstract E castForTesting(InternalEntry entry);
/** Unsafely extracts the key reference queue used by this segment. */
ReferenceQueue getKeyReferenceQueueForTesting() {
throw new AssertionError();
}
/** Unsafely extracts the value reference queue used by this segment. */
ReferenceQueue getValueReferenceQueueForTesting() {
throw new AssertionError();
}
/** Unsafely extracts the weak value reference inside of the given {@code entry}. */
WeakValueReference getWeakValueReferenceForTesting(InternalEntry entry) {
throw new AssertionError();
}
/**
* Unsafely creates of a fresh {@link WeakValueReference}, referencing the given {@code value},
* for the given {@code entry}
*/
WeakValueReference newWeakValueReferenceForTesting(
InternalEntry entry, V value) {
throw new AssertionError();
}
/**
* Unsafely sets the weak value reference inside the given {@code entry} to be the given {@link
* valueReference}
*/
void setWeakValueReferenceForTesting(
InternalEntry entry,
WeakValueReference> valueReference) {
throw new AssertionError();
}
/**
* Unsafely sets the given index of this segment's internal hash table to be the given entry.
*/
void setTableEntryForTesting(int i, InternalEntry entry) {
table.set(i, castForTesting(entry));
}
/** Unsafely returns a copy of the given entry. */
E copyForTesting(InternalEntry entry, @Nullable InternalEntry newNext) {
return this.map.entryHelper.copy(self(), castForTesting(entry), castForTesting(newNext));
}
/** Unsafely sets the value of the given entry. */
void setValueForTesting(InternalEntry entry, V value) {
this.map.entryHelper.setValue(self(), castForTesting(entry), value);
}
/** Unsafely returns a fresh entry. */
E newEntryForTesting(K key, int hash, @Nullable InternalEntry next) {
return this.map.entryHelper.newEntry(self(), key, hash, castForTesting(next));
}
/** Unsafely removes the given entry from this segment's hash table. */
@CanIgnoreReturnValue
boolean removeTableEntryForTesting(InternalEntry entry) {
return removeEntryForTesting(castForTesting(entry));
}
/** Unsafely removes the given entry from the given chain in this segment's hash table. */
E removeFromChainForTesting(InternalEntry first, InternalEntry entry) {
return removeFromChain(castForTesting(first), castForTesting(entry));
}
/**
* Unsafely returns the value of the given entry if it's still live, or {@code null} otherwise.
*/
@Nullable
V getLiveValueForTesting(InternalEntry entry) {
return getLiveValue(castForTesting(entry));
}
// reference queues, for garbage collection cleanup
/**
* Cleanup collected entries when the lock is available.
*/
void tryDrainReferenceQueues() {
if (tryLock()) {
try {
maybeDrainReferenceQueues();
} finally {
unlock();
}
}
}
@GuardedBy("this")
void drainKeyReferenceQueue(ReferenceQueue<K> keyReferenceQueue) {
Reference<? extends K> ref;
int i = 0;
while ((ref = keyReferenceQueue.poll()) != null) {
@SuppressWarnings("unchecked")
E entry = (E) ref;
map.reclaimKey(entry);
if (++i == DRAIN_MAX) {
break;
}
}
}
@GuardedBy("this")
void drainValueReferenceQueue(ReferenceQueue<V> valueReferenceQueue) {
Reference<? extends V> ref;
int i = 0;
while ((ref = valueReferenceQueue.poll()) != null) {
@SuppressWarnings("unchecked")
WeakValueReference<K, V, E> valueReference = (WeakValueReference<K, V, E>) ref;
map.reclaimValue(valueReference);
if (++i == DRAIN_MAX) {
break;
}
}
}
<T> void clearReferenceQueue(ReferenceQueue<T> referenceQueue) {
while (referenceQueue.poll() != null) {}
}
/** Returns first entry of bin for given hash. */
E getFirst(int hash) {
// read this volatile field only once
AtomicReferenceArray<E> table = this.table;
return table.get(hash & (table.length() - 1));
}
// Specialized implementations of map methods
E getEntry(Object key, int hash) {
if (count != 0) { // read-volatile
for (E e = getFirst(hash); e != null; e = e.getNext()) {
if (e.getHash() != hash) {
continue;
}
K entryKey = e.getKey();
if (entryKey == null) {
tryDrainReferenceQueues();
continue;
}
if (map.keyEquivalence.equivalent(key, entryKey)) {
return e;
}
}
}
return null;
}
E getLiveEntry(Object key, int hash) {
return getEntry(key, hash);
}
V get(Object key, int hash) {
try {
E e = getLiveEntry(key, hash);
if (e == null) {
return null;
}
V value = e.getValue();
if (value == null) {
tryDrainReferenceQueues();
}
return value;
} finally {
postReadCleanup();
}
}
boolean containsKey(Object key, int hash) {
try {
if (count != 0) { // read-volatile
E e = getLiveEntry(key, hash);
return e != null && e.getValue() != null;
}
return false;
} finally {
postReadCleanup();
}
}
/**
* This method is a convenience for testing. Code should call {@link
* MapMakerInternalMap#containsValue} directly.
*/
@VisibleForTesting
boolean containsValue(Object value) {
try {
if (count != 0) { // read-volatile
AtomicReferenceArray<E> table = this.table;
int length = table.length();
for (int i = 0; i < length; ++i) {
for (E e = table.get(i); e != null; e = e.getNext()) {
V entryValue = getLiveValue(e);
if (entryValue == null) {
continue;
}
if (map.valueEquivalence().equivalent(value, entryValue)) {
return true;
}
}
}
}
return false;
} finally {
postReadCleanup();
}
}
V put(K key, int hash, V value, boolean onlyIfAbsent) {
lock();
try {
preWriteCleanup();
int newCount = this.count + 1;
if (newCount > this.threshold) { // ensure capacity
expand();
newCount = this.count + 1;
}
AtomicReferenceArray<E> table = this.table;
int index = hash & (table.length() - 1);
E first = table.get(index);
// Look for an existing entry.
for (E e = first; e != null; e = e.getNext()) {
K entryKey = e.getKey();
if (e.getHash() == hash
&& entryKey != null
&& map.keyEquivalence.equivalent(key, entryKey)) {
// We found an existing entry.
V entryValue = e.getValue();
if (entryValue == null) {
++modCount;
setValue(e, value);
newCount = this.count; // count remains unchanged
this.count = newCount; // write-volatile
return null;
} else if (onlyIfAbsent) {
// Mimic
// "if (!map.containsKey(key)) ...
// else return map.get(key);
return entryValue;
} else {
// clobber existing entry, count remains unchanged
++modCount;
setValue(e, value);
return entryValue;
}
}
}
// Create a new entry.
++modCount;
E newEntry = map.entryHelper.newEntry(self(), key, hash, first);
setValue(newEntry, value);
table.set(index, newEntry);
this.count = newCount; // write-volatile
return null;
} finally {
unlock();
}
}
/**
* Expands the table if possible.
*/
@GuardedBy("this")
void expand() {
AtomicReferenceArray<E> oldTable = table;
int oldCapacity = oldTable.length();
if (oldCapacity >= MAXIMUM_CAPACITY) {
return;
}
/*
* Reclassify nodes in each list to new Map. Because we are using power-of-two expansion, the
* elements from each bin must either stay at same index, or move with a power of two offset.
* We eliminate unnecessary node creation by catching cases where old nodes can be reused
* because their next fields won't change. Statistically, at the default threshold, only
* about one-sixth of them need cloning when a table doubles. The nodes they replace will be
* garbage collectable as soon as they are no longer referenced by any reader thread that may
* be in the midst of traversing table right now.
*/
int newCount = count;
AtomicReferenceArray<E> newTable = newEntryArray(oldCapacity << 1);
threshold = newTable.length() * 3 / 4;
int newMask = newTable.length() - 1;
for (int oldIndex = 0; oldIndex < oldCapacity; ++oldIndex) {
// We need to guarantee that any existing reads of old Map can
// proceed. So we cannot yet null out each bin.
E head = oldTable.get(oldIndex);
if (head != null) {
E next = head.getNext();
int headIndex = head.getHash() & newMask;
// Single node on list
if (next == null) {
newTable.set(headIndex, head);
} else {
// Reuse the consecutive sequence of nodes with the same target
// index from the end of the list. tail points to the first
// entry in the reusable list.
E tail = head;
int tailIndex = headIndex;
for (E e = next; e != null; e = e.getNext()) {
int newIndex = e.getHash() & newMask;
if (newIndex != tailIndex) {
// The index changed. We'll need to copy the previous entry.
tailIndex = newIndex;
tail = e;
}
}
newTable.set(tailIndex, tail);
// Clone nodes leading up to the tail.
for (E e = head; e != tail; e = e.getNext()) {
int newIndex = e.getHash() & newMask;
E newNext = newTable.get(newIndex);
E newFirst = copyEntry(e, newNext);
if (newFirst != null) {
newTable.set(newIndex, newFirst);
} else {
newCount--;
}
}
}
}
}
table = newTable;
this.count = newCount;
}
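// Editorial worked example: when a segment table doubles from 16 to 32 buckets, an entry at old
// index i keeps index i if bit 0x10 of its hash is clear and moves to i + 16 if it is set. The
// loop above reuses the longest trailing run of nodes that share one new index and clones only
// the nodes ahead of that run, which is where the "about one-sixth need cloning" estimate in the
// comment comes from.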
boolean replace(K key, int hash, V oldValue, V newValue) {
lock();
try {
preWriteCleanup();
AtomicReferenceArray<E> table = this.table;
int index = hash & (table.length() - 1);
E first = table.get(index);
for (E e = first; e != null; e = e.getNext()) {
K entryKey = e.getKey();
if (e.getHash() == hash
&& entryKey != null
&& map.keyEquivalence.equivalent(key, entryKey)) {
// If the value disappeared, this entry is partially collected,
// and we should pretend like it doesn't exist.
V entryValue = e.getValue();
if (entryValue == null) {
if (isCollected(e)) {
int newCount = this.count - 1;
++modCount;
E newFirst = removeFromChain(first, e);
newCount = this.count - 1;
table.set(index, newFirst);
this.count = newCount; // write-volatile
}
return false;
}
if (map.valueEquivalence().equivalent(oldValue, entryValue)) {
++modCount;
setValue(e, newValue);
return true;
} else {
// Mimic
// "if (map.containsKey(key) && map.get(key).equals(oldValue))..."
return false;
}
}
}
return false;
} finally {
unlock();
}
}
V replace(K key, int hash, V newValue) {
lock();
try {
preWriteCleanup();
AtomicReferenceArray<E> table = this.table;
int index = hash & (table.length() - 1);
E first = table.get(index);
for (E e = first; e != null; e = e.getNext()) {
K entryKey = e.getKey();
if (e.getHash() == hash
&& entryKey != null
&& map.keyEquivalence.equivalent(key, entryKey)) {
// If the value disappeared, this entry is partially collected,
// and we should pretend like it doesn't exist.
V entryValue = e.getValue();
if (entryValue == null) {
if (isCollected(e)) {
int newCount = this.count - 1;
++modCount;
E newFirst = removeFromChain(first, e);
newCount = this.count - 1;
table.set(index, newFirst);
this.count = newCount; // write-volatile
}
return null;
}
++modCount;
setValue(e, newValue);
return entryValue;
}
}
return null;
} finally {
unlock();
}
}
@CanIgnoreReturnValue
V remove(Object key, int hash) {
lock();
try {
preWriteCleanup();
int newCount = this.count - 1;
AtomicReferenceArray<E> table = this.table;
int index = hash & (table.length() - 1);
E first = table.get(index);
for (E e = first; e != null; e = e.getNext()) {
K entryKey = e.getKey();
if (e.getHash() == hash
&& entryKey != null
&& map.keyEquivalence.equivalent(key, entryKey)) {
V entryValue = e.getValue();
if (entryValue != null) {
// TODO(kak): Remove this branch
} else if (isCollected(e)) {
// TODO(kak): Remove this branch
} else {
return null;
}
++modCount;
E newFirst = removeFromChain(first, e);
newCount = this.count - 1;
table.set(index, newFirst);
this.count = newCount; // write-volatile
return entryValue;
}
}
return null;
} finally {
unlock();
}
}
boolean remove(Object key, int hash, Object value) {
lock();
try {
preWriteCleanup();
int newCount = this.count - 1;
AtomicReferenceArray<E> table = this.table;
int index = hash & (table.length() - 1);
E first = table.get(index);
for (E e = first; e != null; e = e.getNext()) {
K entryKey = e.getKey();
if (e.getHash() == hash
&& entryKey != null
&& map.keyEquivalence.equivalent(key, entryKey)) {
V entryValue = e.getValue();
boolean explicitRemoval = false;
if (map.valueEquivalence().equivalent(value, entryValue)) {
explicitRemoval = true;
} else if (isCollected(e)) {
// TODO(kak): Remove this branch
} else {
return false;
}
++modCount;
E newFirst = removeFromChain(first, e);
newCount = this.count - 1;
table.set(index, newFirst);
this.count = newCount; // write-volatile
return explicitRemoval;
}
}
return false;
} finally {
unlock();
}
}
void clear() {
if (count != 0) {
lock();
try {
AtomicReferenceArray