// edu.stanford.ppl.concurrent.SnapTreeMap -- Maven / Gradle / Ivy
// (repository site navigation "Show all versions of snaptree / Show documentation" removed)
/*
* Copyright (c) 2009 Stanford University, unless otherwise specified.
* All rights reserved.
*
* This software was developed by the Pervasive Parallelism Laboratory of
* Stanford University, California, USA.
*
* Permission to use, copy, modify, and distribute this software in source
* or binary form for any purpose with or without fee is hereby granted,
* provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of Stanford University nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
package edu.stanford.ppl.concurrent;
import java.io.*;
import java.util.*;
import java.util.concurrent.ConcurrentNavigableMap;
// TODO: optimized buildFromSorted
// TODO: submap.clone()
/** A concurrent AVL tree with fast cloning, based on the algorithm of Bronson,
* Casper, Chafi, and Olukotun, "A Practical Concurrent Binary Search Tree"
* published in PPoPP'10. To simplify the locking protocols rebalancing work
* is performed in pieces, and some removed keys are retained as routing
* nodes in the tree.
*
* This data structure honors all of the contracts of {@link
* java.util.concurrent.ConcurrentSkipListMap}, with the additional contract
* that clone, size, toArray, and iteration are linearizable (atomic).
*
*
* <p>The tree uses optimistic concurrency control. No locks are usually
* required for get, containsKey, firstKey, firstEntry, lastKey, or lastEntry.
* Reads are not lock free (or even obstruction free), but obstructing threads
* perform no memory allocation, system calls, or loops, which seems to work
* okay in practice. All of the updates to the tree are performed in fixed-
* size blocks, so restoration of the AVL balance criteria may occur after a
* change to the tree has linearized (but before the mutating operation has
* returned). The tree is always properly balanced when quiescent.
*
*
* <p>To clone the tree (or produce a snapshot for consistent iteration) the
* root node is marked as shared, which must be (*) done while there are no
* pending mutations. New mutating operations are blocked if a mark is
* pending, and when existing mutating operations are completed the mark is
* made.
* * - It would be less disruptive if we immediately marked the root as
* shared, and then waited for pending operations that might not have seen the
* mark without blocking new mutations. This could result in imbalance being
* frozen into the shared portion of the tree, though. To minimize the
* problem we perform the mark and reenable mutation on whichever thread
* notices that the entry count has become zero, to reduce context switches on
* the critical path.
*
*
* <p>The same multi-cache line data structure required for efficiently
* tracking the entry and exit for mutating operations is used to maintain the
* current size of the tree. This means that the size can be computed by
* quiescing as for a clone, but without doing any marking.
*
*
* <p>Range queries such as higherKey are not amenable to the optimistic
* hand-over-hand locking scheme used for exact searches, so they are
* implemented with pessimistic concurrency control. Mutation can be
* considered to acquire a lock on the map in Intention-eXclusive mode, range
* queries, size(), and root marking acquire the lock in Shared mode.
*
* @author Nathan Bronson
*/
public class SnapTreeMap extends AbstractMap implements ConcurrentNavigableMap, Cloneable, Serializable {
private static final long serialVersionUID = 9052695062720473599L;

/** If false, null values will trigger a NullPointerException. When false,
 * this map acts exactly like a ConcurrentSkipListMap, except for the
 * running time of the methods. The ability to get a snapshot reduces the
 * potential ambiguity between null values and absent entries, so I'm not
 * sure what the default should be.
 */
static final boolean AllowNullValues = false;

/** This is a special value that indicates the presence of a null value,
 * to differentiate from the absence of a value. Only used when
 * {@link #AllowNullValues} is true.
 */
static final Object SpecialNull = new Object();

/** This is a special value that indicates that an optimistic read
 * failed. It is never handed out of the internal search/update helpers.
 */
static final Object SpecialRetry = new Object();

/** The number of spins before yielding. Tunable via the
 * {@code snaptree.spin} system property (default 100).
 */
static final int SpinCount = Integer.parseInt(System.getProperty("snaptree.spin", "100"));

/** The number of yields before blocking. Tunable via the
 * {@code snaptree.yield} system property (default 0).
 */
static final int YieldCount = Integer.parseInt(System.getProperty("snaptree.yield", "0"));

// we encode directions as characters (rather than booleans) for readability
static final char Left = 'L';
static final char Right = 'R';
/** An OVL is a version number and lock used for optimistic
 * concurrent control of some program invariant. If {@link #isShrinking}
 * then the protected invariant is changing. If two reads of an OVL are
 * performed that both see the same non-changing value, the reader may
 * conclude that no changes to the protected invariant occurred between
 * the two reads. The special value UnlinkedOVL is not changing, and is
 * guaranteed to not result from a normal sequence of beginChange and
 * endChange operations.
 *
 * For convenience endChange(ovl) == endChange(beginChange(ovl)).
 */
static long beginChange(long ovl) {
    // setting the low bit marks the protected invariant as "changing"
    return ovl | 1L;
}

static long endChange(long ovl) {
    // bump the version and clear bits 0-1 in a single step; yields the
    // same value whether or not beginChange was applied first
    return (ovl | 3L) + 1L;
}

/** Bit 1 set: node has been unlinked; never produced by begin/endChange. */
static final long UnlinkedOVL = 2;

static boolean isShrinking(long ovl) {
    return (ovl & 1L) != 0L;
}

static boolean isUnlinked(long ovl) {
    return (ovl & 2L) != 0L;
}

static boolean isShrinkingOrUnlinked(long ovl) {
    return (ovl & 3L) != 0L;
}
private static class Node implements Map.Entry {
final K key;
volatile int height;
/** null means this node is conceptually not present in the map.
* SpecialNull means the value is null.
*/
volatile Object vOpt;
volatile Node parent;
volatile long shrinkOVL;
volatile Node left;
volatile Node right;
Node(final K key,
final int height,
final Object vOpt,
final Node parent,
final long shrinkOVL,
final Node left,
final Node right)
{
this.key = key;
this.height = height;
this.vOpt = vOpt;
this.parent = parent;
this.shrinkOVL = shrinkOVL;
this.left = left;
this.right = right;
}
@Override
public K getKey() { return key; }
@Override
@SuppressWarnings("unchecked")
public V getValue() {
final Object tmp = vOpt;
if (AllowNullValues) {
return tmp == SpecialNull ? null : (V)tmp;
} else {
return (V)tmp;
}
}
@Override
public V setValue(final V v) {
throw new UnsupportedOperationException();
}
Node child(char dir) { return dir == Left ? left : right; }
void setChild(char dir, Node node) {
if (dir == Left) {
left = node;
} else {
right = node;
}
}
//////// copy-on-write stuff
private static boolean isShared(final Node node) {
return node != null && node.parent == null;
}
static Node markShared(final Node node) {
if (node != null) {
node.parent = null;
}
return node;
}
private Node lazyCopy(Node newParent) {
assert (isShared(this));
assert (!isShrinkingOrUnlinked(shrinkOVL));
return new Node(key, height, vOpt, newParent, 0L, markShared(left), markShared(right));
}
Node unsharedLeft() {
final Node cl = left;
if (!isShared(cl)) {
return cl;
} else {
lazyCopyChildren();
return left;
}
}
Node unsharedRight() {
final Node cr = right;
if (!isShared(cr)) {
return cr;
} else {
lazyCopyChildren();
return right;
}
}
Node unsharedChild(final char dir) {
return dir == Left ? unsharedLeft() : unsharedRight();
}
private synchronized void lazyCopyChildren() {
final Node cl = left;
if (isShared(cl)) {
left = cl.lazyCopy(this);
}
final Node cr = right;
if (isShared(cr)) {
right = cr.lazyCopy(this);
}
}
//////// per-node blocking
private void waitUntilShrinkCompleted(final long ovl) {
if (!isShrinking(ovl)) {
return;
}
for (int tries = 0; tries < SpinCount; ++tries) {
if (shrinkOVL != ovl) {
return;
}
}
for (int tries = 0; tries < YieldCount; ++tries) {
Thread.yield();
if (shrinkOVL != ovl) {
return;
}
}
// spin and yield failed, use the nuclear option
synchronized (this) {
// we can't have gotten the lock unless the shrink was over
}
assert(shrinkOVL != ovl);
}
int validatedHeight() {
final int hL = left == null ? 0 : left.validatedHeight();
final int hR = right == null ? 0 : right.validatedHeight();
assert(Math.abs(hL - hR) <= 1);
final int h = 1 + Math.max(hL, hR);
assert(h == height);
return height;
}
//////// SubMap.size() helper
static int computeFrozenSize(Node root,
Comparable super K> fromCmp,
boolean fromIncl,
final Comparable super K> toCmp,
final boolean toIncl) {
int result = 0;
while (true) {
if (root == null) {
return result;
}
if (fromCmp != null) {
final int c = fromCmp.compareTo(root.key);
if (c > 0 || (c == 0 && !fromIncl)) {
// all matching nodes are on the right side
root = root.right;
continue;
}
}
if (toCmp != null) {
final int c = toCmp.compareTo(root.key);
if (c < 0 || (c == 0 && !toIncl)) {
// all matching nodes are on the left side
root = root.left;
continue;
}
}
// Current node matches. Nodes on left no longer need toCmp, nodes
// on right no longer need fromCmp.
if (root.vOpt != null) {
++result;
}
result += computeFrozenSize(root.left, fromCmp, fromIncl, null, false);
fromCmp = null;
root = root.right;
}
}
//////// Map.Entry stuff
@Override
public boolean equals(final Object o) {
if (!(o instanceof Map.Entry)) {
return false;
}
final Map.Entry rhs = (Map.Entry)o;
return eq(key, rhs.getKey()) && eq(getValue(), rhs.getValue());
}
private static boolean eq(final Object o1, final Object o2) {
return o1 == null ? o2 == null : o1.equals(o2);
}
@Override
public int hashCode() {
return (key == null ? 0 : key.hashCode()) ^
(getValue() == null ? 0 : getValue().hashCode());
}
@Override
public String toString() {
return key + "=" + getValue();
}
}
/** Dummy node anchoring the tree: the real root is this holder's right
 * child, so the root can be replaced (or marked shared) under the
 * holder's monitor exactly like any other child link.
 */
private static class RootHolder extends Node {
    RootHolder() {
        super(null, 1, null, null, 0L, null, null);
    }
    // snapshot constructor: adopts the snapshot's (already shared) root
    RootHolder(final RootHolder snapshot) {
        super(null, 1 + snapshot.height, null, null, 0L, null, snapshot.right);
    }
}
private static class COWMgr extends CopyOnWriteManager> {
COWMgr() {
super(new RootHolder(), 0);
}
COWMgr(final RootHolder initialValue, final int initialSize) {
super(initialValue, initialSize);
}
protected RootHolder freezeAndClone(final RootHolder value) {
Node.markShared(value.right);
return new RootHolder(value);
}
protected RootHolder cloneFrozen(final RootHolder frozenValue) {
return new RootHolder(frozenValue);
}
}
//////// node access functions
private static int height(final Node,?> node) {
return node == null ? 0 : node.height;
}
/** Maps the internal value encoding back to a user-visible value:
 * SpecialNull becomes null when null values are allowed.  Must never be
 * handed SpecialRetry.
 */
@SuppressWarnings("unchecked")
private V decodeNull(final Object vOpt) {
    assert (vOpt != SpecialRetry);
    if (!AllowNullValues) {
        return (V)vOpt;
    }
    return vOpt == SpecialNull ? null : (V)vOpt;
}
/** Maps a user-supplied value to the internal encoding: null becomes
 * SpecialNull when null values are allowed, otherwise null is rejected.
 * @throws NullPointerException if v is null and nulls are disallowed
 */
private static Object encodeNull(final Object v) {
    if (!AllowNullValues) {
        if (v == null) {
            throw new NullPointerException();
        }
        return v;
    }
    return v == null ? SpecialNull : v;
}
//////////////// state
private final Comparator super K> comparator;
private transient volatile COWMgr holderRef;
//////////////// public interface
/** Creates an empty map ordered by the keys' natural ordering. */
public SnapTreeMap() {
    this.comparator = null;
    this.holderRef = new COWMgr();
}

/** Creates an empty map ordered by {@code comparator}, or by the keys'
 * natural ordering if it is null.
 */
public SnapTreeMap(final Comparator<? super K> comparator) {
    this.comparator = comparator;
    this.holderRef = new COWMgr();
}

/** Creates a map ordered by the keys' natural ordering, initialized with
 * the entries of {@code source}.
 */
public SnapTreeMap(final Map<? extends K, ? extends V> source) {
    this.comparator = null;
    this.holderRef = new COWMgr();
    putAll(source);
}

/** Creates a map with the same ordering and entries as {@code source}.
 * When source is itself a SnapTreeMap this is a cheap snapshot clone.
 */
public SnapTreeMap(final SortedMap<K, ? extends V> source) {
    this.comparator = source.comparator();
    if (source instanceof SnapTreeMap) {
        final SnapTreeMap s = (SnapTreeMap) source;
        this.holderRef = (COWMgr) s.holderRef.clone();
    }
    else {
        // TODO: take advantage of the sort order
        // for now we optimize only by bypassing the COWMgr
        int size = 0;
        final RootHolder holder = new RootHolder();
        for (Map.Entry<K, ? extends V> e : source.entrySet()) {
            final K k = e.getKey();
            final V v = e.getValue();
            if (k == null) {
                throw new NullPointerException("source map contained a null key");
            }
            if (!AllowNullValues && v == null) {
                throw new NullPointerException("source map contained a null value");
            }
            updateUnderRoot(k, comparable(k), UpdateAlways, null, encodeNull(v), holder);
            ++size;
        }
        this.holderRef = new COWMgr(holder, size);
    }
}
/** Returns a linearizable snapshot of this map.  The underlying tree is
 * shared copy-on-write, so cloning is inexpensive.
 */
@SuppressWarnings("unchecked")
@Override
public SnapTreeMap clone() {
    final SnapTreeMap copy;
    try {
        copy = (SnapTreeMap) super.clone();
    } catch (final CloneNotSupportedException cantHappen) {
        // unreachable: this class implements Cloneable
        throw new InternalError();
    }
    assert(copy.comparator == comparator);
    copy.holderRef = (COWMgr) holderRef.clone();
    return copy;
}
@Override
public int size() {
return holderRef.size();
}
@Override
public boolean isEmpty() {
// removed-but-not-unlinked nodes cannot be leaves, so if the tree is
// truly empty then the root holder has no right child
return holderRef.read().right == null;
}
@Override
public void clear() {
holderRef = new COWMgr();
}
@Override
public Comparator super K> comparator() {
return comparator;
}
@Override
public boolean containsValue(final Object value) {
// apply the same null policy as the rest of the code, but fall
// back to the default implementation
encodeNull(value);
return super.containsValue(value);
}
//////// concurrent search

/** {@inheritDoc} */
@Override
public boolean containsKey(final Object key) {
    final Object raw = getImpl(key);
    return raw != null;
}

/** {@inheritDoc} */
@Override
public V get(final Object key) {
    final Object raw = getImpl(key);
    return decodeNull(raw);
}
@SuppressWarnings("unchecked")
private Comparable super K> comparable(final Object key) {
if (key == null) {
throw new NullPointerException();
}
if (comparator == null) {
return (Comparable super K>)key;
}
return new Comparable() {
final Comparator super K> _cmp = comparator;
@SuppressWarnings("unchecked")
public int compareTo(final K rhs) { return _cmp.compare((K)key, rhs); }
};
}
/** Returns either a value or SpecialNull, if present, or null, if absent. */
private Object getImpl(final Object key) {
final Comparable super K> k = comparable(key);
while (true) {
final Node right = holderRef.read().right;
if (right == null) {
return null;
} else {
final int rightCmp = k.compareTo(right.key);
if (rightCmp == 0) {
// who cares how we got here
return right.vOpt;
}
final long ovl = right.shrinkOVL;
if (isShrinkingOrUnlinked(ovl)) {
right.waitUntilShrinkCompleted(ovl);
// RETRY
} else if (right == holderRef.read().right) {
// the reread of .right is the one protected by our read of ovl
final Object vo = attemptGet(k, right, (rightCmp < 0 ? Left : Right), ovl);
if (vo != SpecialRetry) {
return vo;
}
// else RETRY
}
}
}
}
private Object attemptGet(final Comparable super K> k,
final Node node,
final char dirToC,
final long nodeOVL) {
while (true) {
final Node child = node.child(dirToC);
if (child == null) {
if (node.shrinkOVL != nodeOVL) {
return SpecialRetry;
}
// Note is not present. Read of node.child occurred while
// parent.child was valid, so we were not affected by any
// shrinks.
return null;
} else {
final int childCmp = k.compareTo(child.key);
if (childCmp == 0) {
// how we got here is irrelevant
return child.vOpt;
}
// child is non-null
final long childOVL = child.shrinkOVL;
if (isShrinkingOrUnlinked(childOVL)) {
child.waitUntilShrinkCompleted(childOVL);
if (node.shrinkOVL != nodeOVL) {
return SpecialRetry;
}
// else RETRY
} else if (child != node.child(dirToC)) {
// this .child is the one that is protected by childOVL
if (node.shrinkOVL != nodeOVL) {
return SpecialRetry;
}
// else RETRY
} else {
if (node.shrinkOVL != nodeOVL) {
return SpecialRetry;
}
// At this point we know that the traversal our parent took
// to get to node is still valid. The recursive
// implementation will validate the traversal from node to
// child, so just prior to the nodeOVL validation both
// traversals were definitely okay. This means that we are
// no longer vulnerable to node shrinks, and we don't need
// to validate nodeOVL any more.
final Object vo = attemptGet(k, child, (childCmp < 0 ? Left : Right), childOVL);
if (vo != SpecialRetry) {
return vo;
}
// else RETRY
}
}
}
}
/** {@inheritDoc}
 * @throws NoSuchElementException if the map is empty
 */
@Override
public K firstKey() {
    return extremeKeyOrThrow(Left);
}

/** Returns an immutable snapshot of the least entry, or null if empty. */
@Override
@SuppressWarnings("unchecked")
public Map.Entry firstEntry() {
    return (SimpleImmutableEntry) extreme(false, Left);
}

/** {@inheritDoc}
 * @throws NoSuchElementException if the map is empty
 */
@Override
public K lastKey() {
    return extremeKeyOrThrow(Right);
}

/** Returns an immutable snapshot of the greatest entry, or null if empty. */
@Override // was missing, unlike the three sibling accessors above
@SuppressWarnings("unchecked")
public Map.Entry lastEntry() {
    return (SimpleImmutableEntry) extreme(false, Right);
}
/** Key at the {@code dir} extreme of the tree.
 * @throws NoSuchElementException if the map is empty
 */
@SuppressWarnings("unchecked")
private K extremeKeyOrThrow(final char dir) {
    final Object result = extreme(true, dir);
    if (result == null) {
        throw new NoSuchElementException();
    }
    return (K) result;
}
/** Returns a key if returnKey is true, a SimpleImmutableEntry otherwise.
 * Returns null if none exists. Root-level retry loop mirroring getImpl():
 * the real descent happens in attemptExtreme.
 */
private Object extreme(final boolean returnKey, final char dir) {
    while (true) {
        final Node right = holderRef.read().right;
        if (right == null) {
            // tree is empty
            return null;
        } else {
            final long ovl = right.shrinkOVL;
            if (isShrinkingOrUnlinked(ovl)) {
                // root is in motion; wait for the change, then retry
                right.waitUntilShrinkCompleted(ovl);
                // RETRY
            } else if (right == holderRef.read().right) {
                // the reread of .right is the one protected by our read of ovl
                final Object vo = attemptExtreme(returnKey, dir, right, ovl);
                if (vo != SpecialRetry) {
                    return vo;
                }
                // else RETRY
            }
        }
    }
}
/** Recursive helper for extreme(): follows {@code dir} to the outermost
 * node, validating each hand-over-hand hop against the parent's OVL.
 * Returns the key or a copied entry, or SpecialRetry if validation fails
 * and the caller must restart one level up.
 */
private Object attemptExtreme(final boolean returnKey,
                              final char dir,
                              final Node node,
                              final long nodeOVL) {
    while (true) {
        final Node child = node.child(dir);

        if (child == null) {
            // read of the value must be protected by the OVL, because we
            // must linearize against another thread that inserts a new min
            // key and then changes this key's value
            final Object vo = node.vOpt;

            if (node.shrinkOVL != nodeOVL) {
                return SpecialRetry;
            }

            assert(vo != null);

            return returnKey ? node.key : new SimpleImmutableEntry(node.key, decodeNull(vo));
        } else {
            // child is non-null
            final long childOVL = child.shrinkOVL;
            if (isShrinkingOrUnlinked(childOVL)) {
                child.waitUntilShrinkCompleted(childOVL);

                if (node.shrinkOVL != nodeOVL) {
                    return SpecialRetry;
                }
                // else RETRY
            } else if (child != node.child(dir)) {
                // this .child is the one that is protected by childOVL
                if (node.shrinkOVL != nodeOVL) {
                    return SpecialRetry;
                }
                // else RETRY
            } else {
                // validate the hop our caller made to reach node
                if (node.shrinkOVL != nodeOVL) {
                    return SpecialRetry;
                }
                final Object vo = attemptExtreme(returnKey, dir, child, childOVL);
                if (vo != SpecialRetry) {
                    return vo;
                }
                // else RETRY
            }
        }
    }
}
//////////////// quiesced search

// The eight NavigableMap neighbor queries below all delegate to
// boundedExtreme(), which walks a frozen snapshot or a quiesced tree
// (see the class comment on range queries).

@Override
@SuppressWarnings("unchecked")
public K lowerKey(final K key) {
    return (K) boundedExtreme(null, false, comparable(key), false, true, Right);
}

@Override
@SuppressWarnings("unchecked")
public K floorKey(final K key) {
    return (K) boundedExtreme(null, false, comparable(key), true, true, Right);
}

@Override
@SuppressWarnings("unchecked")
public K ceilingKey(final K key) {
    return (K) boundedExtreme(comparable(key), true, null, false, true, Left);
}

@Override
@SuppressWarnings("unchecked")
public K higherKey(final K key) {
    return (K) boundedExtreme(comparable(key), false, null, false, true, Left);
}

@Override
@SuppressWarnings("unchecked")
public Entry lowerEntry(final K key) {
    return (Entry) boundedExtreme(null, false, comparable(key), false, false, Right);
}

@Override
@SuppressWarnings("unchecked")
public Entry floorEntry(final K key) {
    return (Entry) boundedExtreme(null, false, comparable(key), true, false, Right);
}

@Override
@SuppressWarnings("unchecked")
public Entry ceilingEntry(final K key) {
    return (Entry) boundedExtreme(comparable(key), true, null, false, false, Left);
}

@Override
@SuppressWarnings("unchecked")
public Entry higherEntry(final K key) {
    return (Entry) boundedExtreme(comparable(key), false, null, false, false, Left);
}
/** Returns null if none exists. */
@SuppressWarnings("unchecked")
private K boundedExtremeKeyOrThrow(final Comparable super K> minCmp,
final boolean minIncl,
final Comparable super K> maxCmp,
final boolean maxIncl,
final char dir) {
final K k = (K) boundedExtreme(minCmp, minIncl, maxCmp, maxIncl, true, dir);
if (k == null) {
throw new NoSuchElementException();
}
return k;
}
/** Returns null if none exists. */
@SuppressWarnings("unchecked")
private Object boundedExtreme(final Comparable super K> minCmp,
final boolean minIncl,
final Comparable super K> maxCmp,
final boolean maxIncl,
final boolean returnKey,
final char dir) {
K resultKey;
Object result;
if ((dir == Left && minCmp == null) || (dir == Right && maxCmp == null)) {
// no bound in the extreme direction, so use the concurrent search
result = extreme(returnKey, dir);
if (result == null) {
return null;
}
resultKey = returnKey ? (K) result : ((SimpleImmutableEntry) result).getKey();
}
else {
RootHolder holder = holderRef.availableFrozen();
final Epoch.Ticket ticket;
if (holder == null) {
ticket = holderRef.beginQuiescent();
holder = holderRef.read();
}
else {
ticket = null;
}
try {
final Node node = (dir == Left)
? boundedMin(holder.right, minCmp, minIncl)
: boundedMax(holder.right, maxCmp, maxIncl);
if (node == null) {
return null;
}
resultKey = node.key;
if (returnKey) {
result = node.key;
}
else if (ticket == null) {
// node of a frozen tree is okay, copy otherwise
result = node;
}
else {
// we must copy the node
result = new SimpleImmutableEntry(node.key, node.getValue());
}
}
finally {
if (ticket != null) {
ticket.leave(0);
}
}
}
if (dir == Left && maxCmp != null) {
final int c = maxCmp.compareTo(resultKey);
if (c < 0 || (c == 0 && !maxIncl)) {
return null;
}
}
if (dir == Right && minCmp != null) {
final int c = minCmp.compareTo(resultKey);
if (c > 0 || (c == 0 && !minIncl)) {
return null;
}
}
return result;
}
private Node boundedMin(Node node,
final Comparable super K> minCmp,
final boolean minIncl) {
while (node != null) {
final int c = minCmp.compareTo(node.key);
if (c < 0) {
// there may be a matching node on the left branch
final Node z = boundedMin(node.left, minCmp, minIncl);
if (z != null) {
return z;
}
}
if (c < 0 || (c == 0 && minIncl)) {
// this node is a candidate, is it actually present?
if (node.vOpt != null) {
return node;
}
}
// the matching node is on the right branch if it is present
node = node.right;
}
return null;
}
private Node boundedMax(Node node,
final Comparable super K> maxCmp,
final boolean maxIncl) {
while (node != null) {
final int c = maxCmp.compareTo(node.key);
if (c > 0) {
// there may be a matching node on the right branch
final Node z = boundedMax(node.right, maxCmp, maxIncl);
if (z != null) {
return z;
}
}
if (c > 0 || (c == 0 && maxIncl)) {
// this node is a candidate, is it actually present?
if (node.vOpt != null) {
return node;
}
}
// the matching node is on the left branch if it is present
node = node.left;
}
return null;
}
//////////////// update

// Update-mode codes passed to update()/attemptUpdate(); they select the
// precondition checked by shouldUpdate() and the result convention
// (UpdateIfEq yields a Boolean, the others yield the previous raw value).
private static final int UpdateAlways = 0; // put, remove
private static final int UpdateIfAbsent = 1; // putIfAbsent
private static final int UpdateIfPresent = 2; // replace(key, value)
private static final int UpdateIfEq = 3; // replace(key, old, new), remove(key, value)
/** Decides whether the write should proceed, given the previous raw value
 * {@code prev} (null means absent) under the mode {@code func}.  For
 * UpdateIfEq the caller-supplied {@code expected} raw value must match.
 */
private static boolean shouldUpdate(final int func, final Object prev, final Object expected) {
    if (func == UpdateAlways) {
        return true;
    }
    if (func == UpdateIfAbsent) {
        return prev == null;
    }
    if (func == UpdateIfPresent) {
        return prev != null;
    }

    // UpdateIfEq
    assert (expected != null);
    if (prev == null) {
        return false;
    }
    if (AllowNullValues && (prev == SpecialNull || expected == SpecialNull)) {
        // SpecialNull only equals itself, never a real value
        return prev == SpecialNull && expected == SpecialNull;
    }
    return prev.equals(expected);
}
/** Result returned when the precondition failed: FALSE for conditional
 * (UpdateIfEq) operations, otherwise the previous raw value.
 */
private static Object noUpdateResult(final int func, final Object prev) {
    if (func == UpdateIfEq) {
        return Boolean.FALSE;
    }
    return prev;
}

/** Result returned when the write happened: TRUE for conditional
 * (UpdateIfEq) operations, otherwise the previous raw value.
 */
private static Object updateResult(final int func, final Object prev) {
    if (func == UpdateIfEq) {
        return Boolean.TRUE;
    }
    return prev;
}
/** Net change in map size implied by an update's mode, its result, and
 * the (raw) new value; a null newValue denotes a removal.
 */
private static int sizeDelta(final int func, final Object result, final Object newValue) {
    if (func == UpdateAlways) {
        // lost an entry if one was present, gained one if a value was stored
        return (result != null ? -1 : 0) + (newValue != null ? 1 : 0);
    }
    if (func == UpdateIfAbsent) {
        assert (newValue != null);
        return result != null ? 0 : 1;
    }
    if (func == UpdateIfPresent) {
        return result == null ? 0 : (newValue != null ? 0 : -1);
    }
    // UpdateIfEq: result is the Boolean success flag
    return !((Boolean) result) ? 0 : (newValue != null ? 0 : -1);
}
/** {@inheritDoc} */
@Override
public V put(final K key, final V value) {
    return decodeNull(update(key, UpdateAlways, null, encodeNull(value)));
}

/** {@inheritDoc} */
@Override
public V putIfAbsent(final K key, final V value) {
    return decodeNull(update(key, UpdateIfAbsent, null, encodeNull(value)));
}

/** {@inheritDoc} */
@Override
public V replace(final K key, final V value) {
    return decodeNull(update(key, UpdateIfPresent, null, encodeNull(value)));
}

/** {@inheritDoc} */
@Override
public boolean replace(final K key, final V oldValue, final V newValue) {
    return (Boolean) update(key, UpdateIfEq, encodeNull(oldValue), encodeNull(newValue));
}

/** {@inheritDoc} */
@Override
public V remove(final Object key) {
    return decodeNull(update(key, UpdateAlways, null, null));
}

/** {@inheritDoc} */
@Override
public boolean remove(final Object key, final Object value) {
    if (key == null) {
        throw new NullPointerException();
    }
    if (!AllowNullValues && value == null) {
        // a null value can never be present, so nothing can match
        return false;
    }
    return (Boolean) update(key, UpdateIfEq, encodeNull(value), null);
}
// manages the epoch
private Object update(final Object key,
final int func,
final Object expected,
final Object newValue) {
final Comparable super K> k = comparable(key);
int sd = 0;
final Epoch.Ticket ticket = holderRef.beginMutation();
try {
final Object result = updateUnderRoot(key, k, func, expected, newValue, holderRef.mutable());
sd = sizeDelta(func, result, newValue);
return result;
} finally {
ticket.leave(sd);
}
}
// manages updates to the root holder
@SuppressWarnings("unchecked")
private Object updateUnderRoot(final Object key,
final Comparable super K> k,
final int func,
final Object expected,
final Object newValue,
final RootHolder holder) {
while (true) {
final Node right = holder.unsharedRight();
if (right == null) {
// key is not present
if (!shouldUpdate(func, null, expected)) {
return noUpdateResult(func, null);
}
if (newValue == null || attemptInsertIntoEmpty((K)key, newValue, holder)) {
// nothing needs to be done, or we were successful, prev value is Absent
return updateResult(func, null);
}
// else RETRY
} else {
final long ovl = right.shrinkOVL;
if (isShrinkingOrUnlinked(ovl)) {
right.waitUntilShrinkCompleted(ovl);
// RETRY
} else if (right == holder.right) {
// this is the protected .right
final Object vo = attemptUpdate(key, k, func, expected, newValue, holder, right, ovl);
if (vo != SpecialRetry) {
return vo;
}
// else RETRY
}
}
}
}
/** Installs {@code key} as the sole node of an empty tree.  Returns false
 * (so the caller retries) if another thread installed a root first.
 */
private boolean attemptInsertIntoEmpty(final K key,
                                       final Object vOpt,
                                       final RootHolder holder) {
    synchronized (holder) {
        if (holder.right != null) {
            // lost the race with a concurrent insert
            return false;
        }
        holder.right = new Node(key, 1, vOpt, holder, 0L, null, null);
        holder.height = 2;
        return true;
    }
}
/** If successful returns the non-null previous value, SpecialNull for a
* null previous value, or null if not previously in the map.
* The caller should retry if this method returns SpecialRetry.
*/
@SuppressWarnings("unchecked")
private Object attemptUpdate(final Object key,
final Comparable super K> k,
final int func,
final Object expected,
final Object newValue,
final Node parent,
final Node node,
final long nodeOVL) {
// As the search progresses there is an implicit min and max assumed for the
// branch of the tree rooted at node. A left rotation of a node x results in
// the range of keys in the right branch of x being reduced, so if we are at a
// node and we wish to traverse to one of the branches we must make sure that
// the node has not undergone a rotation since arriving from the parent.
//
// A rotation of node can't screw us up once we have traversed to node's
// child, so we don't need to build a huge transaction, just a chain of
// smaller read-only transactions.
assert (nodeOVL != UnlinkedOVL);
final int cmp = k.compareTo(node.key);
if (cmp == 0) {
return attemptNodeUpdate(func, expected, newValue, parent, node);
}
final char dirToC = cmp < 0 ? Left : Right;
while (true) {
final Node child = node.unsharedChild(dirToC);
if (node.shrinkOVL != nodeOVL) {
return SpecialRetry;
}
if (child == null) {
// key is not present
if (newValue == null) {
// Removal is requested. Read of node.child occurred
// while parent.child was valid, so we were not affected
// by any shrinks.
return null;
} else {
// Update will be an insert.
final boolean success;
final Node damaged;
synchronized (node) {
// Validate that we haven't been affected by past
// rotations. We've got the lock on node, so no future
// rotations can mess with us.
if (node.shrinkOVL != nodeOVL) {
return SpecialRetry;
}
if (node.child(dirToC) != null) {
// Lost a race with a concurrent insert. No need
// to back up to the parent, but we must RETRY in
// the outer loop of this method.
success = false;
damaged = null;
} else {
// We're valid. Does the user still want to
// perform the operation?
if (!shouldUpdate(func, null, expected)) {
return noUpdateResult(func, null);
}
// Create a new leaf
node.setChild(dirToC, new Node((K)key, 1, newValue, node, 0L, null, null));
success = true;
// attempt to fix node.height while we've still got
// the lock
damaged = fixHeight_nl(node);
}
}
if (success) {
fixHeightAndRebalance(damaged);
return updateResult(func, null);
}
// else RETRY
}
} else {
// non-null child
final long childOVL = child.shrinkOVL;
if (isShrinkingOrUnlinked(childOVL)) {
child.waitUntilShrinkCompleted(childOVL);
// RETRY
} else if (child != node.child(dirToC)) {
// this second read is important, because it is protected
// by childOVL
// RETRY
} else {
// validate the read that our caller took to get to node
if (node.shrinkOVL != nodeOVL) {
return SpecialRetry;
}
// At this point we know that the traversal our parent took
// to get to node is still valid. The recursive
// implementation will validate the traversal from node to
// child, so just prior to the nodeOVL validation both
// traversals were definitely okay. This means that we are
// no longer vulnerable to node shrinks, and we don't need
// to validate nodeOVL any more.
final Object vo = attemptUpdate(key, k, func, expected, newValue, node, child, childOVL);
if (vo != SpecialRetry) {
return vo;
}
// else RETRY
}
}
}
}
/** parent will only be used for unlink, update can proceed even if parent
 * is stale.  Performs the write on the node whose key matched: either an
 * in-place vOpt update or a full unlink when removing a node with at most
 * one child.  Returns SpecialRetry when the caller must re-navigate.
 */
private Object attemptNodeUpdate(final int func,
                                 final Object expected,
                                 final Object newValue,
                                 final Node parent,
                                 final Node node) {
    if (newValue == null) {
        // removal
        if (node.vOpt == null) {
            // This node is already removed, nothing to do.
            return null;
        }
    }

    if (newValue == null && (node.left == null || node.right == null)) {
        // potential unlink, get ready by locking the parent
        final Object prev;
        final Node damaged;
        synchronized (parent) {
            // lock order is parent then node
            if (isUnlinked(parent.shrinkOVL) || node.parent != parent) {
                // parent link is stale; caller must search again
                return SpecialRetry;
            }

            synchronized (node) {
                prev = node.vOpt;
                if (!shouldUpdate(func, prev, expected)) {
                    return noUpdateResult(func, prev);
                }
                if (prev == null) {
                    // value vanished while we were locking: nothing to remove
                    return updateResult(func, prev);
                }
                if (!attemptUnlink_nl(parent, node)) {
                    return SpecialRetry;
                }
            }
            // try to fix the parent while we've still got the lock
            damaged = fixHeight_nl(parent);
        }
        fixHeightAndRebalance(damaged);
        return updateResult(func, prev);
    } else {
        // potential update (including remove-without-unlink)
        synchronized (node) {
            // regular version changes don't bother us
            if (isUnlinked(node.shrinkOVL)) {
                return SpecialRetry;
            }
            final Object prev = node.vOpt;
            if (!shouldUpdate(func, prev, expected)) {
                return noUpdateResult(func, prev);
            }
            // retry if we now detect that unlink is possible
            if (newValue == null && (node.left == null || node.right == null)) {
                return SpecialRetry;
            }
            // update in-place
            node.vOpt = newValue;
            return updateResult(func, prev);
        }
    }
}
/** Does not adjust the size or any heights.  Splices {@code node} (which
 * must have at most one child) out of the tree by replacing it in
 * {@code parent} with that child, then marks it unlinked via its OVL.
 * Caller must hold both locks ({@code _nl} = no locking here).  Returns
 * false if the unlink no longer applies and the caller should retry.
 */
private boolean attemptUnlink_nl(final Node parent, final Node node) {
    // assert (Thread.holdsLock(parent));
    // assert (Thread.holdsLock(node));
    assert (!isUnlinked(parent.shrinkOVL));
    final Node parentL = parent.left;
    final Node parentR = parent.right;
    if (parentL != node && parentR != node) {
        // node is no longer a child of parent
        return false;
    }
    assert (!isUnlinked(node.shrinkOVL));
    assert (parent == node.parent);
    // unshared* copies children shared with a frozen snapshot, so the
    // splice cannot mutate a snapshot's structure
    final Node left = node.unsharedLeft();
    final Node right = node.unsharedRight();
    if (left != null && right != null) {
        // splicing is no longer possible
        return false;
    }
    final Node splice = left != null ? left : right;
    if (parentL == node) {
        parent.left = splice;
    } else {
        parent.right = splice;
    }
    if (splice != null) {
        splice.parent = parent;
    }
    // mark the node dead; concurrent readers observe this via the OVL
    node.shrinkOVL = UnlinkedOVL;
    node.vOpt = null;
    return true;
}
//////////////// NavigableMap stuff
/** Removes and returns the entry with the least key, or null if the map is empty. */
@Override
public Map.Entry pollFirstEntry() {
    return pollExtremeEntry(Left);
}
/** Removes and returns the entry with the greatest key, or null if the map is empty. */
@Override
public Map.Entry pollLastEntry() {
    return pollExtremeEntry(Right);
}
/** Removes the extreme entry in direction {@code dir} inside an epoch
 * mutation ticket so the map's size delta is accounted for exactly once.
 */
private Map.Entry pollExtremeEntry(final char dir) {
    final Epoch.Ticket ticket = holderRef.beginMutation();
    int sizeDelta = 0;
    try {
        final Map.Entry prev = pollExtremeEntryUnderRoot(dir, holderRef.mutable());
        if (prev != null) {
            // an entry was actually removed
            sizeDelta = -1;
        }
        return prev;
    } finally {
        // publish the size change even if an exception escapes
        ticket.leave(sizeDelta);
    }
}
/** Removes and returns the extreme entry in direction {@code dir} under
 * {@code holder}, or null only when the tree is observed to be empty.
 *
 * Bug fix: attemptRemoveExtreme signals optimistic failure by returning
 * null -- it never returns SpecialRetry -- so the previous test
 * {@code result != SpecialRetry} was always true and a transient
 * conflict was reported to the caller as an empty tree.  We now retry
 * the loop whenever the attempt yields null, and report emptiness only
 * from the {@code right == null} check at the top of the loop.
 */
private Map.Entry pollExtremeEntryUnderRoot(final char dir, final RootHolder holder) {
    while (true) {
        final Node right = holder.unsharedRight();
        if (right == null) {
            // tree is empty, nothing to remove
            return null;
        } else {
            final long ovl = right.shrinkOVL;
            if (isShrinkingOrUnlinked(ovl)) {
                right.waitUntilShrinkCompleted(ovl);
                // RETRY
            } else if (right == holder.right) {
                // this is the protected .right
                final Map.Entry result = attemptRemoveExtreme(dir, holder, right, ovl);
                if (result != null) {
                    // a successful removal is always a non-null entry
                    return result;
                }
                // else optimistic failure: RETRY
            }
        }
    }
}
/** Descends in direction {@code dir} to the extreme node under
 * {@code node}, unlinks it, and returns its entry.  A null return always
 * means optimistic failure (caller must retry); success is always a
 * non-null SimpleImmutableEntry.  {@code nodeOVL} is the version the
 * caller observed while traversing to node.
 */
private Map.Entry attemptRemoveExtreme(final char dir,
                                       final Node parent,
                                       final Node node,
                                       final long nodeOVL) {
    assert (nodeOVL != UnlinkedOVL);
    while (true) {
        final Node child = node.unsharedChild(dir);
        if (nodeOVL != node.shrinkOVL) {
            // node shrank after the caller's read: retry
            return null;
        }
        if (child == null) {
            // node is the extreme one: potential unlink, get ready by
            // locking the parent
            final Object vo;
            final Node damaged;
            synchronized (parent) {
                // revalidate under the lock
                if (isUnlinked(parent.shrinkOVL) || node.parent != parent) {
                    return null;
                }
                synchronized (node) {
                    vo = node.vOpt;
                    if (node.child(dir) != null || !attemptUnlink_nl(parent, node)) {
                        return null;
                    }
                    // success!
                }
                // try to fix parent.height while we've still got the lock
                damaged = fixHeight_nl(parent);
            }
            fixHeightAndRebalance(damaged);
            return new SimpleImmutableEntry(node.key, decodeNull(vo));
        } else {
            // keep going down
            final long childOVL = child.shrinkOVL;
            if (isShrinkingOrUnlinked(childOVL)) {
                child.waitUntilShrinkCompleted(childOVL);
                // RETRY
            } else if (child != node.child(dir)) {
                // this second read is important, because it is protected
                // by childOVL
                // RETRY
            } else {
                // validate the read that our caller took to get to node
                if (node.shrinkOVL != nodeOVL) {
                    return null;
                }
                final Map.Entry result = attemptRemoveExtreme(dir, node, child, childOVL);
                if (result != null) {
                    return result;
                }
                // else RETRY
            }
        }
    }
}
//////////////// tree balance and height info repair
// sentinel results of nodeCondition (non-negative results are replacement heights)
private static final int UnlinkRequired = -1;
private static final int RebalanceRequired = -2;
private static final int NothingRequired = -3;

/** Examines {@code node} without locks and classifies the repair it
 * needs: UnlinkRequired, RebalanceRequired, NothingRequired, or a value
 * >= 0 giving the height that should replace node.height.
 */
private int nodeCondition(final Node node) {
    // Begin atomic.
    final Node nL = node.left;
    final Node nR = node.right;
    if ((nL == null || nR == null) && node.vOpt == null) {
        // a value-less routing node with a missing child should be unlinked
        return UnlinkRequired;
    }
    final int hN = node.height;
    final int hL0 = height(nL);
    final int hR0 = height(nR);
    // End atomic.  Since any thread that changes a node promises to fix
    // it, either our read was consistent (and a NothingRequired conclusion
    // is correct) or someone else has taken responsibility for either node
    // or one of its children.
    final int hNRepl = 1 + Math.max(hL0, hR0);
    final int bal = hL0 - hR0;
    if (bal < -1 || bal > 1) {
        // AVL balance factor violated
        return RebalanceRequired;
    }
    return hN != hNRepl ? hNRepl : NothingRequired;
}
/** Walks upward from {@code node}, repairing heights and rotating as
 * needed, until no damage remains for which this thread is responsible.
 * Plain height fixes lock only the node; structural fixes lock parent
 * then node (consistent lock order).
 */
private void fixHeightAndRebalance(Node node) {
    while (node != null && node.parent != null) {
        final int condition = nodeCondition(node);
        if (condition == NothingRequired || isUnlinked(node.shrinkOVL)) {
            // nothing to do, or no point in fixing this node
            return;
        }
        if (condition != UnlinkRequired && condition != RebalanceRequired) {
            // just a height repair; node's own lock suffices
            synchronized (node) {
                node = fixHeight_nl(node);
            }
        } else {
            // structural repair: lock the parent first, then the node
            final Node nParent = node.parent;
            synchronized (nParent) {
                if (!isUnlinked(nParent.shrinkOVL) && node.parent == nParent) {
                    synchronized (node) {
                        node = rebalance_nl(nParent, node);
                    }
                }
                // else RETRY
            }
        }
    }
}
/** Attempts to fix the height of a (locked) damaged node, returning the
 * lowest damaged node for which this thread is responsible.  Returns null
 * if no more repairs are needed.
 */
private Node fixHeight_nl(final Node node) {
    final int c = nodeCondition(node);
    if (c == RebalanceRequired || c == UnlinkRequired) {
        // structural repair is needed, which we can't perform here
        return node;
    }
    if (c == NothingRequired) {
        // any future damage to this node is not our responsibility
        return null;
    }
    // c is the corrected height; installing it may damage the parent,
    // which we can't fix while holding only this lock
    node.height = c;
    return node.parent;
}
/** nParent and n must be locked on entry.  Returns a damaged node, or null
 * if no more rebalancing is necessary.  Depending on n's condition this
 * unlinks n, rotates at n, or just installs a corrected height.
 */
private Node rebalance_nl(final Node nParent, final Node n) {
    final Node nL = n.unsharedLeft();
    final Node nR = n.unsharedRight();
    if ((nL == null || nR == null) && n.vOpt == null) {
        // n is a removed routing node with at most one child: unlink it
        if (attemptUnlink_nl(nParent, n)) {
            // attempt to fix nParent.height while we've still got the lock
            return fixHeight_nl(nParent);
        } else {
            // retry needed for n
            return n;
        }
    }
    final int hN = n.height;
    final int hL0 = height(nL);
    final int hR0 = height(nR);
    final int hNRepl = 1 + Math.max(hL0, hR0);
    final int bal = hL0 - hR0;
    if (bal > 1) {
        // left-heavy
        return rebalanceToRight_nl(nParent, n, nL, hR0);
    } else if (bal < -1) {
        // right-heavy
        return rebalanceToLeft_nl(nParent, n, nR, hL0);
    } else if (hNRepl != hN) {
        // we've got more than enough locks to do a height change, no need to
        // trigger a retry
        n.height = hNRepl;
        // nParent is already locked, let's try to fix it too
        return fixHeight_nl(nParent);
    } else {
        // nothing to do
        return null;
    }
}
/** Repairs a left-heavy n by rotating right, possibly preceded by a
 * rotate-left of n.left.  nParent and n are locked by the caller; nL's
 * (and maybe nLR's) lock is taken here.  Returns the next damaged node,
 * or null when repair is complete.
 */
private Node rebalanceToRight_nl(final Node nParent,
                                 final Node n,
                                 final Node nL,
                                 final int hR0) {
    // L is too large, we will rotate-right.  If L.R is taller
    // than L.L, then we will first rotate-left L.
    synchronized (nL) {
        final int hL = nL.height;
        if (hL - hR0 <= 1) {
            return n; // retry
        } else {
            final Node nLR = nL.unsharedRight();
            final int hLL0 = height(nL.left);
            final int hLR0 = height(nLR);
            if (hLL0 >= hLR0) {
                // rotate right based on our snapshot of hLR
                return rotateRight_nl(nParent, n, nL, hR0, hLL0, nLR, hLR0);
            } else {
                synchronized (nLR) {
                    // If our hLR snapshot is incorrect then we might
                    // actually need to do a single rotate-right on n.
                    final int hLR = nLR.height;
                    if (hLL0 >= hLR) {
                        return rotateRight_nl(nParent, n, nL, hR0, hLL0, nLR, hLR);
                    } else {
                        // If the underlying left balance would not be
                        // sufficient to actually fix n.left, then instead
                        // of rolling it into a double rotation we do it on
                        // it's own.  This may let us avoid rotating n at
                        // all, but more importantly it avoids the creation
                        // of damaged nodes that don't have a direct
                        // ancestry relationship.  The recursive call to
                        // rebalanceToRight_nl in this case occurs after we
                        // release the lock on nLR.
                        final int hLRL = height(nLR.left);
                        final int b = hLL0 - hLRL;
                        if (b >= -1 && b <= 1) {
                            // nParent.child.left won't be damaged after a double rotation
                            return rotateRightOverLeft_nl(nParent, n, nL, hR0, hLL0, nLR, hLRL);
                        }
                    }
                }
                // focus on nL, if necessary n will be balanced later
                return rebalanceToLeft_nl(n, nL, nLR, hLL0);
            }
        }
    }
}
/** Mirror image of rebalanceToRight_nl: repairs a right-heavy n by
 * rotating left, possibly preceded by a rotate-right of n.right.
 * nParent and n are locked by the caller.
 */
private Node rebalanceToLeft_nl(final Node nParent,
                                final Node n,
                                final Node nR,
                                final int hL0) {
    synchronized (nR) {
        final int hR = nR.height;
        if (hL0 - hR >= -1) {
            return n; // retry
        } else {
            final Node nRL = nR.unsharedLeft();
            final int hRL0 = height(nRL);
            final int hRR0 = height(nR.right);
            if (hRR0 >= hRL0) {
                // single rotate-left based on our snapshot of hRL
                return rotateLeft_nl(nParent, n, hL0, nR, nRL, hRL0, hRR0);
            } else {
                synchronized (nRL) {
                    // re-read the height now that nRL is locked
                    final int hRL = nRL.height;
                    if (hRR0 >= hRL) {
                        return rotateLeft_nl(nParent, n, hL0, nR, nRL, hRL, hRR0);
                    } else {
                        // see the analogous comment in rebalanceToRight_nl:
                        // only fold nRL into a double rotation if that
                        // leaves nR undamaged
                        final int hRLR = height(nRL.right);
                        final int b = hRR0 - hRLR;
                        if (b >= -1 && b <= 1) {
                            return rotateLeftOverRight_nl(nParent, n, hL0, nR, nRL, hRR0, hRLR);
                        }
                    }
                }
                // focus on nR; if necessary n will be balanced later
                return rebalanceToRight_nl(n, nR, nRL, hRR0);
            }
        }
    }
}
/** Single right rotation: nL replaces n as nParent's child and n becomes
 * nL's right child.  nParent, n and nL are locked; concurrent readers
 * are protected by the beginChange/endChange OVL protocol on n.
 * Returns the next damaged node, or null.
 */
private Node rotateRight_nl(final Node nParent,
                            final Node n,
                            final Node nL,
                            final int hR,
                            final int hLL,
                            final Node nLR,
                            final int hLR) {
    final long nodeOVL = n.shrinkOVL;
    final Node nPL = nParent.left;
    // mark n as shrinking so concurrent readers wait or retry
    n.shrinkOVL = beginChange(nodeOVL);
    n.left = nLR;
    if (nLR != null) {
        nLR.parent = n;
    }
    nL.right = n;
    n.parent = nL;
    if (nPL == n) {
        nParent.left = nL;
    } else {
        nParent.right = nL;
    }
    nL.parent = nParent;
    // fix up heights links
    final int hNRepl = 1 + Math.max(hLR, hR);
    n.height = hNRepl;
    nL.height = 1 + Math.max(hLL, hNRepl);
    n.shrinkOVL = endChange(nodeOVL);
    // We have damaged nParent, n (now parent.child.right), and nL (now
    // parent.child).  n is the deepest.  Perform as many fixes as we can
    // with the locks we've got.
    // We've already fixed the height for n, but it might still be outside
    // our allowable balance range.  In that case a simple fixHeight_nl
    // won't help.
    final int balN = hLR - hR;
    if (balN < -1 || balN > 1) {
        // we need another rotation at n
        return n;
    }
    // we've already fixed the height at nL, do we need a rotation here?
    final int balL = hLL - hNRepl;
    if (balL < -1 || balL > 1) {
        return nL;
    }
    // try to fix the parent height while we've still got the lock
    return fixHeight_nl(nParent);
}
/** Mirror image of rotateRight_nl: nR replaces n as nParent's child and
 * n becomes nR's left child.  nParent, n and nR are locked; readers are
 * protected by the beginChange/endChange OVL protocol on n.
 */
private Node rotateLeft_nl(final Node nParent,
                           final Node n,
                           final int hL,
                           final Node nR,
                           final Node nRL,
                           final int hRL,
                           final int hRR) {
    final long nodeOVL = n.shrinkOVL;
    final Node nPL = nParent.left;
    n.shrinkOVL = beginChange(nodeOVL);
    // fix up n links, careful to be compatible with concurrent traversal for all but n
    n.right = nRL;
    if (nRL != null) {
        nRL.parent = n;
    }
    nR.left = n;
    n.parent = nR;
    if (nPL == n) {
        nParent.left = nR;
    } else {
        nParent.right = nR;
    }
    nR.parent = nParent;
    // fix up heights
    final int hNRepl = 1 + Math.max(hL, hRL);
    n.height = hNRepl;
    nR.height = 1 + Math.max(hNRepl, hRR);
    n.shrinkOVL = endChange(nodeOVL);
    // does n still need a rotation?
    final int balN = hRL - hL;
    if (balN < -1 || balN > 1) {
        return n;
    }
    // we've already fixed the height at nR, do we need a rotation there?
    final int balR = hRR - hNRepl;
    if (balR < -1 || balR > 1) {
        return nR;
    }
    // try to fix the parent height while we've still got the lock
    return fixHeight_nl(nParent);
}
/** Double rotation: nLR becomes the root of this subtree, with nL as its
 * left child and n as its right child.  nParent, n, nL and nLR are all
 * locked; both n and nL shrink, so both use the OVL change protocol.
 */
private Node rotateRightOverLeft_nl(final Node nParent,
                                    final Node n,
                                    final Node nL,
                                    final int hR,
                                    final int hLL,
                                    final Node nLR,
                                    final int hLRL) {
    final long nodeOVL = n.shrinkOVL;
    final long leftOVL = nL.shrinkOVL;
    final Node nPL = nParent.left;
    final Node nLRL = nLR.unsharedLeft();
    final Node nLRR = nLR.unsharedRight();
    final int hLRR = height(nLRR);
    n.shrinkOVL = beginChange(nodeOVL);
    nL.shrinkOVL = beginChange(leftOVL);
    // fix up n links, careful about the order!
    n.left = nLRR;
    if (nLRR != null) {
        nLRR.parent = n;
    }
    nL.right = nLRL;
    if (nLRL != null) {
        nLRL.parent = nL;
    }
    nLR.left = nL;
    nL.parent = nLR;
    nLR.right = n;
    n.parent = nLR;
    if (nPL == n) {
        nParent.left = nLR;
    } else {
        nParent.right = nLR;
    }
    nLR.parent = nParent;
    // fix up heights
    final int hNRepl = 1 + Math.max(hLRR, hR);
    n.height = hNRepl;
    final int hLRepl = 1 + Math.max(hLL, hLRL);
    nL.height = hLRepl;
    nLR.height = 1 + Math.max(hLRepl, hNRepl);
    n.shrinkOVL = endChange(nodeOVL);
    nL.shrinkOVL = endChange(leftOVL);
    // caller should have performed only a single rotation if nL was going
    // to end up damaged
    assert(Math.abs(hLL - hLRL) <= 1);
    // We have damaged nParent, nLR (now parent.child), and n (now
    // parent.child.right).  n is the deepest.  Perform as many fixes as we
    // can with the locks we've got.
    // We've already fixed the height for n, but it might still be outside
    // our allowable balance range.  In that case a simple fixHeight_nl
    // won't help.
    final int balN = hLRR - hR;
    if (balN < -1 || balN > 1) {
        // we need another rotation at n
        return n;
    }
    // we've already fixed the height at nLR, do we need a rotation here?
    final int balLR = hLRepl - hNRepl;
    if (balLR < -1 || balLR > 1) {
        return nLR;
    }
    // try to fix the parent height while we've still got the lock
    return fixHeight_nl(nParent);
}
/** Mirror image of rotateRightOverLeft_nl: nRL becomes the root of this
 * subtree, with n as its left child and nR as its right child.
 * nParent, n, nR and nRL are all locked.
 */
private Node rotateLeftOverRight_nl(final Node nParent,
                                    final Node n,
                                    final int hL,
                                    final Node nR,
                                    final Node nRL,
                                    final int hRR,
                                    final int hRLR) {
    final long nodeOVL = n.shrinkOVL;
    final long rightOVL = nR.shrinkOVL;
    final Node nPL = nParent.left;
    final Node nRLL = nRL.unsharedLeft();
    final Node nRLR = nRL.unsharedRight();
    final int hRLL = height(nRLL);
    // both n and nR shrink during a double rotation
    n.shrinkOVL = beginChange(nodeOVL);
    nR.shrinkOVL = beginChange(rightOVL);
    // fix up n links, careful about the order!
    n.right = nRLL;
    if (nRLL != null) {
        nRLL.parent = n;
    }
    nR.left = nRLR;
    if (nRLR != null) {
        nRLR.parent = nR;
    }
    nRL.right = nR;
    nR.parent = nRL;
    nRL.left = n;
    n.parent = nRL;
    if (nPL == n) {
        nParent.left = nRL;
    } else {
        nParent.right = nRL;
    }
    nRL.parent = nParent;
    // fix up heights
    final int hNRepl = 1 + Math.max(hL, hRLL);
    n.height = hNRepl;
    final int hRRepl = 1 + Math.max(hRLR, hRR);
    nR.height = hRRepl;
    nRL.height = 1 + Math.max(hNRepl, hRRepl);
    n.shrinkOVL = endChange(nodeOVL);
    nR.shrinkOVL = endChange(rightOVL);
    // caller should have performed only a single rotation if nR was going
    // to end up damaged
    assert(Math.abs(hRR - hRLR) <= 1);
    // does n need another rotation?
    final int balN = hRLL - hL;
    if (balN < -1 || balN > 1) {
        return n;
    }
    // we've already fixed the height at nRL, do we need a rotation there?
    final int balRL = hRRepl - hNRepl;
    if (balRL < -1 || balRL > 1) {
        return nRL;
    }
    // try to fix the parent height while we've still got the lock
    return fixHeight_nl(nParent);
}
//////////////// Map views
/** Returns a NavigableSet view of the keys; equivalent to navigableKeySet(). */
@Override
public NavigableSet keySet() {
    return navigableKeySet();
}
/** Returns a live Set view of the mappings; writes pass through to the map. */
@Override
public Set> entrySet() {
    return new EntrySet();
}
/** Live view of the map's entries; all operations delegate to the
 * enclosing SnapTreeMap.  Iteration walks a frozen snapshot.
 */
private class EntrySet extends AbstractSet> {
    @Override
    public int size() {
        return SnapTreeMap.this.size();
    }
    @Override
    public boolean isEmpty() {
        return SnapTreeMap.this.isEmpty();
    }
    @Override
    public void clear() {
        SnapTreeMap.this.clear();
    }
    @Override
    public boolean contains(final Object o) {
        if (!(o instanceof Map.Entry,?>)) {
            return false;
        }
        final Object k = ((Map.Entry,?>)o).getKey();
        final Object v = ((Map.Entry,?>)o).getValue();
        // getImpl returns the encoded value, or null when there is no mapping
        final Object actualVo = SnapTreeMap.this.getImpl(k);
        if (actualVo == null) {
            // no associated value
            return false;
        }
        final V actual = decodeNull(actualVo);
        return v == null ? actual == null : v.equals(actual);
    }
    @Override
    public boolean add(final Entry e) {
        final Object v = encodeNull(e.getValue());
        // reference comparison against the prior value returned by update:
        // reports whether the map changed as a result of this call
        return update(e.getKey(), UpdateAlways, null, v) != v;
    }
    @Override
    public boolean remove(final Object o) {
        if (!(o instanceof Map.Entry,?>)) {
            return false;
        }
        final Object k = ((Map.Entry,?>)o).getKey();
        final Object v = ((Map.Entry,?>)o).getValue();
        // only removes when the current value matches v
        return SnapTreeMap.this.remove(k, v);
    }
    @Override
    public Iterator> iterator() {
        return new EntryIter(SnapTreeMap.this);
    }
}
/** Iterator over Map.Entry values, backed by AbstractIter's snapshot walk. */
private static class EntryIter extends AbstractIter implements Iterator> {
    /** Unbounded ascending iteration over the whole map. */
    private EntryIter(final SnapTreeMap m) {
        super(m);
    }
    /** Bounded and optionally descending iteration, used by submap views. */
    private EntryIter(final SnapTreeMap m,
                      final Comparable super K> minCmp,
                      final boolean minIncl,
                      final Comparable super K> maxCmp,
                      final boolean maxIncl,
                      final boolean descending) {
        super(m, minCmp, minIncl, maxCmp, maxIncl, descending);
    }
    @Override
    public Entry next() {
        // the snapshot node is returned directly as the Entry
        return nextNode();
    }
}
/** Iterator over keys, backed by AbstractIter's snapshot walk. */
private static class KeyIter extends AbstractIter implements Iterator {
    /** Unbounded ascending iteration over the whole map. */
    private KeyIter(final SnapTreeMap m) {
        super(m);
    }
    /** Bounded and optionally descending iteration, used by submap views. */
    private KeyIter(final SnapTreeMap m,
                    final Comparable super K> minCmp,
                    final boolean minIncl,
                    final Comparable super K> maxCmp,
                    final boolean maxIncl,
                    final boolean descending) {
        super(m, minCmp, minIncl, maxCmp, maxIncl, descending);
    }
    @Override
    public K next() {
        return nextNode().key;
    }
}
/** Stack-based traversal over a frozen snapshot of the tree.  Because
 * the snapshot is immutable, no locks or OVL validation are needed while
 * iterating.  Supports optional bounds and descending order for
 * submap/descending views.  remove() writes through to the live map.
 */
private static class AbstractIter {
    private final SnapTreeMap m;
    // true when iterating from the largest key toward the smallest
    private final boolean descending;
    // child direction of iteration progress: Right, or Left when descending
    private final char forward;
    // opposite of forward; the first element is the deepest reverse child
    private final char reverse;
    // path[0..depth-1] holds the nodes from the snapshot root down to the
    // current node; set to null once iteration finishes so the snapshot
    // is not pinned
    private Node[] path;
    private int depth = 0;
    // node returned by the last nextNode() call, target of remove()
    private Node mostRecentNode;
    // key of the last node to visit when bounded, null when unbounded
    private final K endKey;
    /** Unbounded ascending iteration over a fresh frozen snapshot. */
    @SuppressWarnings("unchecked")
    AbstractIter(final SnapTreeMap m) {
        this.m = m;
        this.descending = false;
        this.forward = Right;
        this.reverse = Left;
        // the snapshot root hangs off the frozen holder's right link
        final Node root = m.holderRef.frozen().right;
        this.path = (Node[]) new Node[1 + height(root)];
        this.endKey = null;
        pushFirst(root);
    }
    /** Bounded and/or descending iteration (used by submap views). */
    @SuppressWarnings("unchecked")
    AbstractIter(final SnapTreeMap m,
                 final Comparable super K> minCmp,
                 final boolean minIncl,
                 final Comparable super K> maxCmp,
                 final boolean maxIncl,
                 final boolean descending) {
        this.m = m;
        this.descending = descending;
        this.forward = !descending ? Right : Left;
        this.reverse = !descending ? Left : Right;
        // translate the min/max bounds into from/to bounds of the traversal
        final Comparable super K> fromCmp;
        final boolean fromIncl = !descending ? minIncl : maxIncl;
        final Comparable super K> toCmp;
        final boolean toIncl = !descending ? maxIncl : minIncl;
        if (!descending) {
            fromCmp = minCmp;
            toCmp = maxCmp;
        } else {
            fromCmp = maxCmp;
            toCmp = minCmp;
        }
        final Node root = m.holderRef.frozen().right;
        if (toCmp != null) {
            // resolve the terminal key eagerly; advance() stops when it
            // reaches the node holding this key
            this.endKey = (K) m.boundedExtreme(minCmp, minIncl, maxCmp, maxIncl, true, forward);
            if (this.endKey == null) {
                // no node satisfies the bound, nothing to iterate
                // ---------> EARLY EXIT
                return;
            }
        } else {
            this.endKey = null;
        }
        this.path = (Node[]) new Node[1 + height(root)];
        if (fromCmp == null) {
            pushFirst(root);
        }
        else {
            pushFirst(root, fromCmp, fromIncl);
            if (depth > 0 && top().vOpt == null) {
                // skip an initial removed-but-not-unlinked routing node
                advance();
            }
        }
    }
    /** Compares in traversal order: the sign is flipped when descending. */
    private int cmp(final Comparable super K> comparable, final K key) {
        final int c = comparable.compareTo(key);
        if (!descending) {
            return c;
        } else {
            // negating Integer.MIN_VALUE would overflow, so special-case it
            return c == Integer.MIN_VALUE ? 1 : -c;
        }
    }
    /** Pushes node and its entire chain of reverse-direction children. */
    private void pushFirst(Node node) {
        while (node != null) {
            path[depth++] = node;
            node = node.child(reverse);
        }
    }
    /** Like pushFirst(Node), but stops descending at the from-bound. */
    private void pushFirst(Node node, final Comparable super K> fromCmp, final boolean fromIncl) {
        while (node != null) {
            final int c = cmp(fromCmp, node.key);
            if (c > 0 || (c == 0 && !fromIncl)) {
                // everything we're interested in is on the right
                node = node.child(forward);
            }
            else {
                path[depth++] = node;
                if (c == 0) {
                    // start the iteration here
                    return;
                }
                else {
                    node = node.child(reverse);
                }
            }
        }
    }
    /** The current node: top of the path stack. */
    private Node top() {
        return path[depth - 1];
    }
    /** Moves to the next node with a value, or terminates the iteration. */
    private void advance() {
        do {
            final Node t = top();
            // reference comparison: endKey was read from this same snapshot
            if (endKey != null && endKey == t.key) {
                depth = 0;
                path = null;
                return;
            }
            final Node fwd = t.child(forward);
            if (fwd != null) {
                // descend into the forward subtree's first element
                pushFirst(fwd);
            } else {
                // keep going up until we pop a node that is a left child
                Node popped;
                do {
                    popped = path[--depth];
                } while (depth > 0 && popped == top().child(forward));
            }
            if (depth == 0) {
                // clear out the path so we don't pin too much stuff
                path = null;
                return;
            }
            // skip removed-but-not-unlinked entries
        } while (top().vOpt == null);
    }
    public boolean hasNext() {
        return depth > 0;
    }
    /** Returns the current node and advances past it. */
    Node nextNode() {
        if (depth == 0) {
            throw new NoSuchElementException();
        }
        mostRecentNode = top();
        advance();
        return mostRecentNode;
    }
    /** Removes the last returned key from the live map (the snapshot itself is immutable). */
    public void remove() {
        if (mostRecentNode == null) {
            throw new IllegalStateException();
        }
        m.remove(mostRecentNode.key);
        mostRecentNode = null;
    }
}
//////////////// navigable keySet
/** Returns a NavigableSet view of the keys, iterated via a frozen snapshot. */
@Override
public NavigableSet navigableKeySet() {
    return new KeySet(this) {
        public Iterator iterator() {
            return new KeyIter(SnapTreeMap.this);
        }
    };
}
/** Returns a reverse-order NavigableSet view of the keys. */
@Override
public NavigableSet descendingKeySet() {
    return descendingMap().navigableKeySet();
}
private abstract static class KeySet extends AbstractSet implements NavigableSet {
private final ConcurrentNavigableMap map;
protected KeySet(final ConcurrentNavigableMap map) {
this.map = map;
}
//////// basic Set stuff
@Override
abstract public Iterator iterator();
@Override
public boolean contains(final Object o) { return map.containsKey(o); }
@Override
public boolean isEmpty() { return map.isEmpty(); }
@Override
public int size() { return map.size(); }
@Override
public boolean remove(final Object o) { return map.remove(o) != null; }
//////// SortedSet stuff
@Override
public Comparator super K> comparator() { return map.comparator(); }
@Override
public K first() { return map.firstKey(); }
@Override
public K last() { return map.lastKey(); }
//////// NavigableSet stuff
@Override
public K lower(final K k) { return map.lowerKey(k); }
@Override
public K floor(final K k) { return map.floorKey(k); }
@Override
public K ceiling(final K k) { return map.ceilingKey(k); }
@Override
public K higher(final K k) { return map.higherKey(k); }
@Override
public K pollFirst() { return map.pollFirstEntry().getKey(); }
@Override
public K pollLast() { return map.pollLastEntry().getKey(); }
@Override
public NavigableSet descendingSet() { return map.descendingKeySet(); }
@Override
public Iterator descendingIterator() { return map.descendingKeySet().iterator(); }
@Override
public NavigableSet subSet(final K fromElement, final boolean minInclusive, final K toElement, final boolean maxInclusive) {
return map.subMap(fromElement, minInclusive, toElement, maxInclusive).keySet();
}
@Override
public NavigableSet headSet(final K toElement, final boolean inclusive) {
return map.headMap(toElement, inclusive).keySet();
}
@Override
public NavigableSet tailSet(final K fromElement, final boolean inclusive) {
return map.tailMap(fromElement, inclusive).keySet();
}
@Override
public SortedSet subSet(final K fromElement, final K toElement) {
return map.subMap(fromElement, toElement).keySet();
}
@Override
public SortedSet headSet(final K toElement) {
return map.headMap(toElement).keySet();
}
@Override
public SortedSet tailSet(final K fromElement) {
return map.tailMap(fromElement).keySet();
}
}
//////////////// NavigableMap views
/** Returns a live view of the portion of this map whose keys range from
 * fromKey to toKey.
 * @throws IllegalArgumentException if fromKey is greater than toKey
 */
@Override
public ConcurrentNavigableMap subMap(final K fromKey,
                                     final boolean fromInclusive,
                                     final K toKey,
                                     final boolean toInclusive) {
    final Comparable super K> fromCmp = comparable(fromKey);
    if (fromCmp.compareTo(toKey) > 0) {
        throw new IllegalArgumentException();
    }
    return new SubMap(this, fromKey, fromCmp, fromInclusive, toKey, comparable(toKey), toInclusive, false);
}
/** Returns a live view of the keys strictly (or weakly, if inclusive) below toKey. */
@Override
public ConcurrentNavigableMap headMap(final K toKey, final boolean inclusive) {
    return new SubMap(this, null, null, false, toKey, comparable(toKey), inclusive, false);
}
/** Returns a live view of the keys strictly (or weakly, if inclusive) above fromKey. */
@Override
public ConcurrentNavigableMap tailMap(final K fromKey, final boolean inclusive) {
    return new SubMap(this, fromKey, comparable(fromKey), inclusive, null, null, false, false);
}
/** SortedMap-style range view: fromKey inclusive, toKey exclusive. */
@Override
public ConcurrentNavigableMap subMap(final K fromKey, final K toKey) {
    return subMap(fromKey, true, toKey, false);
}
/** SortedMap-style head view: toKey exclusive. */
@Override
public ConcurrentNavigableMap headMap(final K toKey) {
    return headMap(toKey, false);
}
/** SortedMap-style tail view: fromKey inclusive. */
@Override
public ConcurrentNavigableMap tailMap(final K fromKey) {
    return tailMap(fromKey, true);
}
/** Returns an unbounded reverse-order view of this map. */
@Override
public ConcurrentNavigableMap descendingMap() {
    return new SubMap(this, null, null, false, null, null, false, true);
}
private static class SubMap extends AbstractMap implements ConcurrentNavigableMap, Serializable {
private static final long serialVersionUID = -7388140285999372919L;
private final SnapTreeMap m;
private final K minKey;
private transient Comparable super K> minCmp;
private final boolean minIncl;
private final K maxKey;
private transient Comparable super K> maxCmp;
private final boolean maxIncl;
private final boolean descending;
private SubMap(final SnapTreeMap m,
final K minKey,
final Comparable super K> minCmp,
final boolean minIncl,
final K maxKey,
final Comparable super K> maxCmp,
final boolean maxIncl,
final boolean descending) {
this.m = m;
this.minKey = minKey;
this.minCmp = minCmp;
this.minIncl = minIncl;
this.maxKey = maxKey;
this.maxCmp = maxCmp;
this.maxIncl = maxIncl;
this.descending = descending;
}
// TODO: clone
private boolean tooLow(final K key) {
if (minCmp == null) {
return false;
} else {
final int c = minCmp.compareTo(key);
return c > 0 || (c == 0 && !minIncl);
}
}
private boolean tooHigh(final K key) {
if (maxCmp == null) {
return false;
} else {
final int c = maxCmp.compareTo(key);
return c < 0 || (c == 0 && !maxIncl);
}
}
private boolean inRange(final K key) {
return !tooLow(key) && !tooHigh(key);
}
private void requireInRange(final K key) {
if (key == null) {
throw new NullPointerException();
}
if (!inRange(key)) {
throw new IllegalArgumentException();
}
}
private char minDir() {
return descending ? Right : Left;
}
private char maxDir() {
return descending ? Left : Right;
}
//////// AbstractMap
@Override
public boolean isEmpty() {
return m.boundedExtreme(minCmp, minIncl, maxCmp, maxIncl, true, Left) == null;
}
@Override
public int size() {
final Node root = m.holderRef.frozen().right;
return Node.computeFrozenSize(root, minCmp, minIncl, maxCmp, maxIncl);
}
@Override
@SuppressWarnings("unchecked")
public boolean containsKey(final Object key) {
if (key == null) {
throw new NullPointerException();
}
final K k = (K) key;
return inRange(k) && m.containsKey(k);
}
@Override
public boolean containsValue(final Object value) {
// apply the same null policy as the rest of the code, but fall
// back to the default implementation
encodeNull(value);
return super.containsValue(value);
}
@Override
@SuppressWarnings("unchecked")
public V get(final Object key) {
if (key == null) {
throw new NullPointerException();
}
final K k = (K) key;
return !inRange(k) ? null : m.get(k);
}
@Override
public V put(final K key, final V value) {
requireInRange(key);
return m.put(key, value);
}
@Override
@SuppressWarnings("unchecked")
public V remove(final Object key) {
if (key == null) {
throw new NullPointerException();
}
return !inRange((K) key) ? null : m.remove(key);
}
@Override
public Set> entrySet() {
return new EntrySubSet();
}
private class EntrySubSet extends AbstractSet> {
public int size() {
return SubMap.this.size();
}
@Override
public boolean isEmpty() {
return SubMap.this.isEmpty();
}
@SuppressWarnings("unchecked")
@Override
public boolean contains(final Object o) {
if (!(o instanceof Map.Entry,?>)) {
return false;
}
final Object k = ((Map.Entry,?>)o).getKey();
if (!inRange((K) k)) {
return false;
}
final Object v = ((Map.Entry,?>)o).getValue();
final Object actualVo = m.getImpl(k);
if (actualVo == null) {
// no associated value
return false;
}
final V actual = m.decodeNull(actualVo);
return v == null ? actual == null : v.equals(actual);
}
@Override
public boolean add(final Entry e) {
requireInRange(e.getKey());
final Object v = encodeNull(e.getValue());
return m.update(e.getKey(), UpdateAlways, null, v) != v;
}
@Override
public boolean remove(final Object o) {
if (!(o instanceof Map.Entry,?>)) {
return false;
}
final Object k = ((Map.Entry,?>)o).getKey();
final Object v = ((Map.Entry,?>)o).getValue();
return SubMap.this.remove(k, v);
}
@Override
public Iterator> iterator() {
return new EntryIter(m, minCmp, minIncl, maxCmp, maxIncl, descending);
}
}
//////// SortedMap
@Override
public Comparator super K> comparator() {
final Comparator super K> fromM = m.comparator();
if (descending) {
return Collections.reverseOrder(fromM);
} else {
return fromM;
}
}
@Override
public K firstKey() {
return m.boundedExtremeKeyOrThrow(minCmp, minIncl, maxCmp, maxIncl, minDir());
}
@Override
public K lastKey() {
return m.boundedExtremeKeyOrThrow(minCmp, minIncl, maxCmp, maxIncl, maxDir());
}
//////// NavigableMap
@SuppressWarnings("unchecked")
private K firstKeyOrNull() {
return (K) m.boundedExtreme(minCmp, minIncl, maxCmp, maxIncl, true, minDir());
}
@SuppressWarnings("unchecked")
private K lastKeyOrNull() {
return (K) m.boundedExtreme(minCmp, minIncl, maxCmp, maxIncl, true, maxDir());
}
@SuppressWarnings("unchecked")
private Entry firstEntryOrNull() {
return (Entry