/*
* Copyright (C) 2002-2024 Sebastiano Vigna
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.unimi.dsi.fastutil.doubles;
import it.unimi.dsi.fastutil.Hash;
import it.unimi.dsi.fastutil.HashCommon;
import static it.unimi.dsi.fastutil.HashCommon.arraySize;
import static it.unimi.dsi.fastutil.HashCommon.maxFill;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.NoSuchElementException;
/** A type-specific linked hash set with a fast, small-footprint implementation.
*
* <p>Instances of this class use a hash table to represent a set. The table is
* filled up to a specified load factor, and then doubled in size to
* accommodate new entries. If the table is emptied below one fourth
* of the load factor, it is halved in size; however, the table is never reduced to a
* size smaller than that at creation time: this approach makes it
* possible to create sets with a large capacity in which insertions and
* deletions do not immediately cause rehashing. Moreover, halving is
* not performed when deleting entries from an iterator, as it would interfere
* with the iteration process.
*
* <p>Note that {@link #clear()} does not modify the hash table size.
* Rather, a family of {@linkplain #trim() trimming
* methods} lets you control the size of the table; this is particularly useful
* if you reuse instances of this class.
*
* <p>Iterators generated by this set will enumerate elements in the same order in which they
* have been added to the set (addition of elements already present
* in the set does not change the iteration order). Note that this order has nothing in common with the natural
* order of the keys. The order is kept by means of a doubly linked list, represented
* via an array of longs parallel to the table.
*
* <p>This class implements the interface of a sorted set, so as to allow easy
* access to the iteration order: for instance, you can get the first element
* in iteration order with {@code first()} without having to create an
* iterator; however, this class partially violates the {@link java.util.SortedSet}
* contract because all subset methods throw an exception and {@link
* #comparator()} always returns {@code null}.
*
* <p>Additional methods, such as {@code addAndMoveToFirst()}, make it easy
* to use instances of this class as a cache (e.g., with LRU policy).
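*
* <p>For example, a minimal LRU-style usage might look like the following sketch
* (illustrative only; {@code MAX_SIZE} is a hypothetical capacity bound, not part of this class):
* <pre>{@code
* static final int MAX_SIZE = 1024;                            // hypothetical capacity bound
* static final DoubleLinkedOpenHashSet cache = new DoubleLinkedOpenHashSet();
*
* static void access(final double x) {
*     cache.addAndMoveToLast(x);                               // x becomes the most recently used key
*     if (cache.size() > MAX_SIZE) cache.removeFirstDouble();  // evict the least recently used key
* }
* }</pre>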
*
* <p>The iterators provided by this class are type-specific {@linkplain
* java.util.ListIterator list iterators}, and can be started at any
* element which is in the set (if the provided element
* is not in the set, a {@link NoSuchElementException} exception will be thrown).
* If, however, the provided element is not the first or last element in the
* set, the first access to the list index will require linear time, as in the worst case
* the entire set must be scanned in iteration order to retrieve the positional
* index of the starting element. If you use just the methods of a type-specific {@link it.unimi.dsi.fastutil.BidirectionalIterator},
* however, all operations will be performed in constant time.
*
* @see Hash
* @see HashCommon
*/
public class DoubleLinkedOpenHashSet extends AbstractDoubleSortedSet implements java.io.Serializable, Cloneable, Hash {
private static final long serialVersionUID = 0L;
private static final boolean ASSERTS = false;
/** The array of keys. */
protected transient double[] key;
/** The mask for wrapping a position counter. */
protected transient int mask;
/** Whether this set contains the null key. */
protected transient boolean containsNull;
/** The index of the first entry in iteration order. It is valid iff {@link #size} is nonzero; otherwise, it contains -1. */
protected transient int first = -1;
/** The index of the last entry in iteration order. It is valid iff {@link #size} is nonzero; otherwise, it contains -1. */
protected transient int last = -1;
/** For each entry, the next and the previous entry in iteration order,
* stored as {@code ((prev & 0xFFFFFFFFL) << 32) | (next & 0xFFFFFFFFL)}.
* The first entry contains predecessor -1, and the last entry
* contains successor -1. */
protected transient long[] link;
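// For reference: prev = (int)(link[pos] >>> 32), next = (int)link[pos]; -1 in either half means "none".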
/** The current table size. Note that an additional element is allocated for storing the null key. */
protected transient int n;
/** Threshold after which we rehash. It must be the table size times {@link #f}. */
protected transient int maxFill;
/** We never resize below this threshold, which is the construction-time {@link #n}. */
protected final transient int minN;
/** Number of entries in the set (including the null key, if present). */
protected int size;
/** The acceptable load factor. */
protected final float f;
/** Creates a new hash set.
*
* <p>The actual table size will be the least power of two greater than {@code expected}/{@code f}.
*
* @param expected the expected number of elements in the hash set.
* @param f the load factor.
*/
public DoubleLinkedOpenHashSet(final int expected, final float f) {
if (f <= 0 || f >= 1) throw new IllegalArgumentException("Load factor must be greater than 0 and smaller than 1");
if (expected < 0) throw new IllegalArgumentException("The expected number of elements must be nonnegative");
this.f = f;
minN = n = arraySize(expected, f);
mask = n - 1;
maxFill = maxFill(n, f);
key = new double[n + 1];
link = new long[n + 1];
}
/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
*
* @param expected the expected number of elements in the hash set.
*/
public DoubleLinkedOpenHashSet(final int expected) {
this(expected, DEFAULT_LOAD_FACTOR);
}
/** Creates a new hash set with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} elements
* and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
*/
public DoubleLinkedOpenHashSet() {
this(DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR);
}
/** Creates a new hash set copying a given collection.
*
* @param c a {@link Collection} to be copied into the new hash set.
* @param f the load factor.
*/
public DoubleLinkedOpenHashSet(final Collection<? extends Double> c, final float f) {
this(c.size(), f);
addAll(c);
}
/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor
* copying a given collection.
*
* @param c a {@link Collection} to be copied into the new hash set.
*/
public DoubleLinkedOpenHashSet(final Collection<? extends Double> c) {
this(c, DEFAULT_LOAD_FACTOR);
}
/** Creates a new hash set copying a given type-specific collection.
*
* @param c a type-specific collection to be copied into the new hash set.
* @param f the load factor.
*/
public DoubleLinkedOpenHashSet(final DoubleCollection c, final float f) {
this(c.size(), f);
addAll(c);
}
/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor
* copying a given type-specific collection.
*
* @param c a type-specific collection to be copied into the new hash set.
*/
public DoubleLinkedOpenHashSet(final DoubleCollection c) {
this(c, DEFAULT_LOAD_FACTOR);
}
/** Creates a new hash set using elements provided by a type-specific iterator.
*
* @param i a type-specific iterator whose elements will fill the set.
* @param f the load factor.
*/
public DoubleLinkedOpenHashSet(final DoubleIterator i, final float f) {
this(DEFAULT_INITIAL_SIZE, f);
while(i.hasNext()) add(i.nextDouble());
}
/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by a type-specific iterator.
*
* @param i a type-specific iterator whose elements will fill the set.
*/
public DoubleLinkedOpenHashSet(final DoubleIterator i) {
this(i, DEFAULT_LOAD_FACTOR);
}
/** Creates a new hash set using elements provided by an iterator.
*
* @param i an iterator whose elements will fill the set.
* @param f the load factor.
*/
public DoubleLinkedOpenHashSet(final Iterator<?> i, final float f) {
this(DoubleIterators.asDoubleIterator(i), f);
}
/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by an iterator.
*
* @param i an iterator whose elements will fill the set.
*/
public DoubleLinkedOpenHashSet(final Iterator<?> i) {
this(DoubleIterators.asDoubleIterator(i));
}
/** Creates a new hash set and fills it with the elements of a given array.
*
* @param a an array whose elements will be used to fill the set.
* @param offset the first element to use.
* @param length the number of elements to use.
* @param f the load factor.
*/
public DoubleLinkedOpenHashSet(final double[] a, final int offset, final int length, final float f) {
this(length < 0 ? 0 : length, f);
DoubleArrays.ensureOffsetLength(a, offset, length);
for(int i = 0; i < length; i++) add(a[offset + i]);
}
/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor and fills it with the elements of a given array.
*
* @param a an array whose elements will be used to fill the set.
* @param offset the first element to use.
* @param length the number of elements to use.
*/
public DoubleLinkedOpenHashSet(final double[] a, final int offset, final int length) {
this(a, offset, length, DEFAULT_LOAD_FACTOR);
}
/** Creates a new hash set copying the elements of an array.
*
* @param a an array to be copied into the new hash set.
* @param f the load factor.
*/
public DoubleLinkedOpenHashSet(final double[] a, final float f) {
this(a, 0, a.length, f);
}
/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor
* copying the elements of an array.
*
* @param a an array to be copied into the new hash set.
*/
public DoubleLinkedOpenHashSet(final double[] a) {
this(a, DEFAULT_LOAD_FACTOR);
}
/** Creates a new empty hash set.
*
* @return a new empty hash set.
*/
public static DoubleLinkedOpenHashSet of() {
return new DoubleLinkedOpenHashSet ();
}
/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor
* using the given element.
*
* @param e the element that the returned set will contain.
* @return a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor containing {@code e}.
*/
public static DoubleLinkedOpenHashSet of(final double e) {
DoubleLinkedOpenHashSet result = new DoubleLinkedOpenHashSet (1, DEFAULT_LOAD_FACTOR);
result.add(e);
return result;
}
/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor
* using the elements given.
*
* @param e0 the first element.
* @param e1 the second element.
* @return a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor containing {@code e0} and {@code e1}.
* @throws IllegalArgumentException if there were duplicate entries.
*/
public static DoubleLinkedOpenHashSet of(final double e0, final double e1) {
DoubleLinkedOpenHashSet result = new DoubleLinkedOpenHashSet (2, DEFAULT_LOAD_FACTOR);
result.add(e0);
if (!result.add(e1)) {
throw new IllegalArgumentException("Duplicate element: " + e1);
}
return result;
}
/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor
* using the elements given.
*
* @param e0 the first element.
* @param e1 the second element.
* @param e2 the third element.
* @return a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor containing {@code e0}, {@code e1}, and {@code e2}.
* @throws IllegalArgumentException if there were duplicate entries.
*/
public static DoubleLinkedOpenHashSet of(final double e0, final double e1, final double e2) {
DoubleLinkedOpenHashSet result = new DoubleLinkedOpenHashSet (3, DEFAULT_LOAD_FACTOR);
result.add(e0);
if (!result.add(e1)) {
throw new IllegalArgumentException("Duplicate element: " + e1);
}
if (!result.add(e2)) {
throw new IllegalArgumentException("Duplicate element: " + e2);
}
return result;
}
/** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor
* using a list of elements.
*
* @param a a list of elements that will be used to initialize the new hash set.
* @return a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor containing the elements of {@code a}.
* @throws IllegalArgumentException if a duplicate entry was encountered.
*/
public static DoubleLinkedOpenHashSet of(final double... a) {
DoubleLinkedOpenHashSet result = new DoubleLinkedOpenHashSet (a.length, DEFAULT_LOAD_FACTOR);
for (double element : a) {
if (!result.add(element)) {
throw new IllegalArgumentException("Duplicate element " + element);
}
}
return result;
}
/** Collects the result of a primitive {@code Stream} into a new hash set.
*
* <p>This method performs a terminal operation on the given {@code Stream}.
*
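* <p>For instance (an illustrative sketch), a few values can be collected as follows:
* <pre>{@code
* DoubleLinkedOpenHashSet s = DoubleLinkedOpenHashSet.toSet(java.util.stream.DoubleStream.of(1, 2, 3));
* }</pre>
*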
* @apiNote Taking a primitive stream instead of returning something like a
* {@link java.util.stream.Collector Collector} is necessary because there is no
* primitive {@code Collector} equivalent in the Java API.
*/
public static DoubleLinkedOpenHashSet toSet(java.util.stream.DoubleStream stream) {
return stream.collect(
DoubleLinkedOpenHashSet::new,
DoubleLinkedOpenHashSet::add,
DoubleLinkedOpenHashSet::addAll);
}
/** Collects the result of a primitive {@code Stream} into a new hash set, potentially pre-allocated to handle the given size.
*
* <p>This method performs a terminal operation on the given {@code Stream}.
*
* @apiNote Taking a primitive stream instead of returning something like a
* {@link java.util.stream.Collector Collector} is necessary because there is no
* primitive {@code Collector} equivalent in the Java API.
*/
public static DoubleLinkedOpenHashSet toSetWithExpectedSize(java.util.stream.DoubleStream stream, int expectedSize) {
if (expectedSize <= Hash.DEFAULT_INITIAL_SIZE) {
// Already below default capacity. Just use all default construction instead of fiddling with atomics in SizeDecreasingSupplier
return toSet(stream);
}
return stream.collect(
new DoubleCollections.SizeDecreasingSupplier<
DoubleLinkedOpenHashSet >(
expectedSize, (int size) ->
size <= Hash.DEFAULT_INITIAL_SIZE ? new DoubleLinkedOpenHashSet () : new DoubleLinkedOpenHashSet (size)),
DoubleLinkedOpenHashSet::add,
DoubleLinkedOpenHashSet::addAll);
}
private int realSize() {
return containsNull ? size - 1 : size;
}
/** Ensures that this set can hold a certain number of elements without rehashing.
*
* @param capacity a number of elements; there will be no rehashing unless
* the set {@linkplain #size() size} exceeds this number.
*/
public void ensureCapacity(final int capacity) {
final int needed = arraySize(capacity, f);
if (needed > n) rehash(needed);
}
private void tryCapacity(final long capacity) {
final int needed = (int)Math.min(1 << 30, Math.max(2, HashCommon.nextPowerOfTwo((long)Math.ceil(capacity / f))));
if (needed > n) rehash(needed);
}
@Override
public boolean addAll(DoubleCollection c) {
if (f <= .5) ensureCapacity(c.size()); // The resulting collection will be sized for c.size() elements
else tryCapacity(size() + c.size()); // The resulting collection will be tentatively sized for size() + c.size() elements
return super.addAll(c);
}
@Override
public boolean addAll(Collection<? extends Double> c) {
// The resulting collection will be at least c.size() big
if (f <= .5) ensureCapacity(c.size()); // The resulting collection will be sized for c.size() elements
else tryCapacity(size() + c.size()); // The resulting collection will be tentatively sized for size() + c.size() elements
return super.addAll(c);
}
@Override
public boolean add(final double k) {
int pos;
if (( Double.doubleToLongBits(k) == 0 )) {
if (containsNull) return false;
pos = n;
containsNull = true;
}
else {
double curr;
final double[] key = this.key;
// The starting point.
if (! ( Double.doubleToLongBits(curr = key[pos = (int)it.unimi.dsi.fastutil.HashCommon.mix( Double.doubleToRawLongBits(k) ) & mask]) == 0 )) {
if (( Double.doubleToLongBits(curr) == Double.doubleToLongBits(k) )) return false;
while(! ( Double.doubleToLongBits(curr = key[pos = (pos + 1) & mask]) == 0 ))
if (( Double.doubleToLongBits(curr) == Double.doubleToLongBits(k) )) return false;
}
key[pos] = k;
}
if (size == 0) {
first = last = pos;
// Special case of SET_UPPER_LOWER(link[pos], -1, -1);
link[pos] = -1L;
}
else {
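// Append pos at the tail: set the low 32 bits (the "next" pointer) of the old last entry to pos,
// then store in link[pos] a packed value with prev = last (high 32 bits) and next = -1 (low 32 bits).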
link[last] ^= ( ( link[last] ^ ( pos & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL );
link[pos] = ( ( last & 0xFFFFFFFFL ) << 32 ) | ( -1 & 0xFFFFFFFFL );
last = pos;
}
if (size++ >= maxFill) rehash(arraySize(size + 1, f));
if (ASSERTS) checkTable();
return true;
}
/** Shifts left entries with the specified hash code, starting at the specified position,
* and empties the resulting free entry.
*
* @param pos a starting position.
*/
protected final void shiftKeys(int pos) {
// Shift entries with the same hash.
int last, slot;
double curr;
final double[] key = this.key;
for(;;) {
pos = ((last = pos) + 1) & mask;
for(;;) {
if (( Double.doubleToLongBits(curr = key[pos]) == 0 )) {
key[last] = (0);
return;
}
slot = (int)it.unimi.dsi.fastutil.HashCommon.mix( Double.doubleToRawLongBits(curr) ) & mask;
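// The entry at pos can be moved back into the free slot at last only if its preferred slot
// does not lie cyclically in the range (last, pos]; otherwise keep probing.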
if (last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos) break;
pos = (pos + 1) & mask;
}
key[last] = curr;
fixPointers(pos, last);
}
}
private boolean removeEntry(final int pos) {
size--;
fixPointers(pos);
shiftKeys(pos);
if (n > minN && size < maxFill / 4 && n > DEFAULT_INITIAL_SIZE) rehash(n / 2);
return true;
}
private boolean removeNullEntry() {
containsNull = false;
key[n] = (0);
size--;
fixPointers(n);
if (n > minN && size < maxFill / 4 && n > DEFAULT_INITIAL_SIZE) rehash(n / 2);
return true;
}
@Override
public boolean remove(final double k) {
if (( Double.doubleToLongBits(k) == 0 )) {
if (containsNull) return removeNullEntry();
return false;
}
double curr;
final double[] key = this.key;
int pos;
// The starting point.
if (( Double.doubleToLongBits(curr = key[pos = (int)it.unimi.dsi.fastutil.HashCommon.mix( Double.doubleToRawLongBits(k) ) & mask]) == 0 )) return false;
if (( Double.doubleToLongBits(k) == Double.doubleToLongBits(curr) )) return removeEntry(pos);
while(true) {
if (( Double.doubleToLongBits(curr = key[pos = (pos + 1) & mask]) == 0 )) return false;
if (( Double.doubleToLongBits(k) == Double.doubleToLongBits(curr) )) return removeEntry(pos);
}
}
@Override
public boolean contains(final double k) {
if (( Double.doubleToLongBits(k) == 0 )) return containsNull;
double curr;
final double[] key = this.key;
int pos;
// The starting point.
if (( Double.doubleToLongBits(curr = key[pos = (int)it.unimi.dsi.fastutil.HashCommon.mix( Double.doubleToRawLongBits(k) ) & mask]) == 0 )) return false;
if (( Double.doubleToLongBits(k) == Double.doubleToLongBits(curr) )) return true;
while(true) {
if (( Double.doubleToLongBits(curr = key[pos = (pos + 1) & mask]) == 0 )) return false;
if (( Double.doubleToLongBits(k) == Double.doubleToLongBits(curr) )) return true;
}
}
/** Removes the first key in iteration order.
* @return the first key.
* @throws NoSuchElementException if this set is empty.
*/
public double removeFirstDouble() {
if (size == 0) throw new NoSuchElementException();
final int pos = first;
// Abbreviated version of fixPointers(pos)
if (size == 1) first = last = -1;
else {
first = (int) link[pos];
if (0 <= first) {
// Special case of SET_PREV(link[first], -1)
link[first] |= (-1 & 0xFFFFFFFFL) << 32;
}
}
final double k = key[pos];
size--;
if (( Double.doubleToLongBits(k) == 0 )) {
containsNull = false;
key[n] = (0);
}
else shiftKeys(pos);
if (n > minN && size < maxFill / 4 && n > DEFAULT_INITIAL_SIZE) rehash(n / 2);
return k;
}
/** Removes the last key in iteration order.
* @return the last key.
* @throws NoSuchElementException if this set is empty.
*/
public double removeLastDouble() {
if (size == 0) throw new NoSuchElementException();
final int pos = last;
// Abbreviated version of fixPointers(pos)
if (size == 1) first = last = -1;
else {
last = (int) ( link[pos] >>> 32 );
if (0 <= last) {
// Special case of SET_NEXT(link[last], -1)
link[last] |= -1 & 0xFFFFFFFFL;
}
}
final double k = key[pos];
size--;
if (( Double.doubleToLongBits(k) == 0 )) {
containsNull = false;
key[n] = (0);
}
else shiftKeys(pos);
if (n > minN && size < maxFill / 4 && n > DEFAULT_INITIAL_SIZE) rehash(n / 2);
return k;
}
private void moveIndexToFirst(final int i) {
if (size == 1 || first == i) return;
if (last == i) {
last = (int) ( link[i] >>> 32 );
// Special case of SET_NEXT(link[last], -1);
link[last] |= -1 & 0xFFFFFFFFL;
}
else {
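// i is in the middle of the list: splice it out by copying i's "next" into its predecessor's next half
// and i's "prev" into its successor's prev half (the XOR/mask expressions touch only one 32-bit half).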
final long linki = link[i];
final int prev = (int) ( linki >>> 32 );
final int next = (int) linki;
link[prev] ^= ( ( link[prev] ^ ( linki & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL );
link[next] ^= ( ( link[next] ^ ( linki & 0xFFFFFFFF00000000L ) ) & 0xFFFFFFFF00000000L );
}
link[first] ^= ( ( link[first] ^ ( ( i & 0xFFFFFFFFL ) << 32 ) ) & 0xFFFFFFFF00000000L );
link[i] = ( ( -1 & 0xFFFFFFFFL ) << 32 ) | ( first & 0xFFFFFFFFL );
first = i;
}
private void moveIndexToLast(final int i) {
if (size == 1 || last == i) return;
if (first == i) {
first = (int) link[i];
// Special case of SET_PREV(link[first], -1);
link[first] |= (-1 & 0xFFFFFFFFL) << 32;
}
else {
final long linki = link[i];
final int prev = (int) ( linki >>> 32 );
final int next = (int) linki;
link[prev] ^= ( ( link[prev] ^ ( linki & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL );
link[next] ^= ( ( link[next] ^ ( linki & 0xFFFFFFFF00000000L ) ) & 0xFFFFFFFF00000000L );
}
link[last] ^= ( ( link[last] ^ ( i & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL );
link[i] = ( ( last & 0xFFFFFFFFL ) << 32 ) | ( -1 & 0xFFFFFFFFL );
last = i;
}
/** Adds a key to the set; if the key is already present, it is moved to the first position of the iteration order.
*
* @param k the key.
* @return true if the key was not present.
*/
public boolean addAndMoveToFirst(final double k) {
int pos;
if (( Double.doubleToLongBits(k) == 0 )) {
if (containsNull) {
moveIndexToFirst(n);
return false;
}
containsNull = true;
pos = n;
}
else {
// The starting point.
final double key[] = this.key;
pos = (int)it.unimi.dsi.fastutil.HashCommon.mix( Double.doubleToRawLongBits(k) ) & mask;
// There's always an unused entry. TODO
while(! ( Double.doubleToLongBits(key[pos]) == 0 )) {
if (( Double.doubleToLongBits(k) == Double.doubleToLongBits(key[pos]) )) {
moveIndexToFirst(pos);
return false;
}
pos = (pos + 1) & mask;
}
}
key[pos] = k;
if (size == 0) {
first = last = pos;
// Special case of SET_UPPER_LOWER(link[pos], -1, -1);
link[pos] = -1L;
}
else {
link[first] ^= ( ( link[first] ^ ( ( pos & 0xFFFFFFFFL ) << 32 ) ) & 0xFFFFFFFF00000000L );
link[pos] = ( ( -1 & 0xFFFFFFFFL ) << 32 ) | ( first & 0xFFFFFFFFL );
first = pos;
}
if (size++ >= maxFill) rehash(arraySize(size, f));
if (ASSERTS) checkTable();
return true;
}
/** Adds a key to the set; if the key is already present, it is moved to the last position of the iteration order.
*
* @param k the key.
* @return true if the key was not present.
*/
public boolean addAndMoveToLast(final double k) {
int pos;
if (( Double.doubleToLongBits(k) == 0 )) {
if (containsNull) {
moveIndexToLast(n);
return false;
}
containsNull = true;
pos = n;
}
else {
// The starting point.
final double key[] = this.key;
pos = (int)it.unimi.dsi.fastutil.HashCommon.mix( Double.doubleToRawLongBits(k) ) & mask;
// There's always an unused entry.
while(! ( Double.doubleToLongBits(key[pos]) == 0 )) {
if (( Double.doubleToLongBits(k) == Double.doubleToLongBits(key[pos]) )) {
moveIndexToLast(pos);
return false;
}
pos = (pos + 1) & mask;
}
}
key[pos] = k;
if (size == 0) {
first = last = pos;
// Special case of SET_UPPER_LOWER(link[pos], -1, -1);
link[pos] = -1L;
}
else {
link[last] ^= ( ( link[last] ^ ( pos & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL );
link[pos] = ( ( last & 0xFFFFFFFFL ) << 32 ) | ( -1 & 0xFFFFFFFFL );
last = pos;
}
if (size++ >= maxFill) rehash(arraySize(size, f));
if (ASSERTS) checkTable();
return true;
}
/* Removes all elements from this set.
*
* <p>To increase object reuse, this method does not change the table size.
* If you want to reduce the table size, you must use {@link #trim()}.
*
*/
@Override
public void clear() {
if (size == 0) return;
size = 0;
containsNull = false;
Arrays.fill(key, (0));
first = last = -1;
}
@Override
public int size() {
return size;
}
@Override
public boolean isEmpty() {
return size == 0;
}
/** Modifies the {@link #link} vector so that the given entry is removed.
* This method will complete in constant time.
*
* @param i the index of an entry.
*/
protected void fixPointers(final int i) {
if (size == 0) {
first = last = -1;
return;
}
if (first == i) {
first = (int) link[i];
if (0 <= first) {
// Special case of SET_PREV(link[first], -1)
link[first] |= (-1 & 0xFFFFFFFFL) << 32;
}
return;
}
if (last == i) {
last = (int) ( link[i] >>> 32 );
if (0 <= last) {
// Special case of SET_NEXT(link[last], -1)
link[last] |= -1 & 0xFFFFFFFFL;
}
return;
}
final long linki = link[i];
final int prev = (int) ( linki >>> 32 );
final int next = (int) linki;
link[prev] ^= ( ( link[prev] ^ ( linki & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL );
link[next] ^= ( ( link[next] ^ ( linki & 0xFFFFFFFF00000000L ) ) & 0xFFFFFFFF00000000L );
}
/** Modifies the {@link #link} vector for a shift from s to d.
* This method will complete in constant time.
*
* @param s the source position.
* @param d the destination position.
*/
protected void fixPointers(int s, int d) {
if (size == 1) {
first = last = d;
// Special case of SET(link[d], -1, -1)
link[d] = -1L;
return;
}
if (first == s) {
first = d;
link[(int) link[s]] ^= ( ( link[(int) link[s]] ^ ( ( d & 0xFFFFFFFFL ) << 32 ) ) & 0xFFFFFFFF00000000L );
link[d] = link[s];
return;
}
if (last == s) {
last = d;
link[(int) ( link[s] >>> 32 )] ^= ( ( link[(int) ( link[s] >>> 32 )] ^ ( d & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL );
link[d] = link[s];
return;
}
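// s is in the middle of the list: point its predecessor's "next" half and its successor's "prev" half to d,
// then copy s's packed links to d.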
final long links = link[s];
final int prev = (int) ( links >>> 32 );
final int next = (int) links;
link[prev] ^= ( ( link[prev] ^ ( d & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL );
link[next] ^= ( ( link[next] ^ ( ( d & 0xFFFFFFFFL ) << 32 ) ) & 0xFFFFFFFF00000000L );
link[d] = links;
}
/** Returns the first element of this set in iteration order.
*
* @return the first element in iteration order.
*/
@Override
public double firstDouble() {
if (size == 0) throw new NoSuchElementException();
return key[first];
}
/** Returns the last element of this set in iteration order.
*
* @return the last element in iteration order.
*/
@Override
public double lastDouble() {
if (size == 0) throw new NoSuchElementException();
return key[last];
}
/** {@inheritDoc}
* @implSpec This implementation just throws an {@link UnsupportedOperationException}.*/
@Override
public DoubleSortedSet tailSet(double from) { throw new UnsupportedOperationException(); }
/** {@inheritDoc}
* @implSpec This implementation just throws an {@link UnsupportedOperationException}.*/
@Override
public DoubleSortedSet headSet(double to) { throw new UnsupportedOperationException(); }
/** {@inheritDoc}
* @implSpec This implementation just throws an {@link UnsupportedOperationException}.*/
@Override
public DoubleSortedSet subSet(double from, double to) { throw new UnsupportedOperationException(); }
/** {@inheritDoc}
* @implSpec This implementation just returns {@code null}.*/
@Override
public DoubleComparator comparator() { return null; }
/** A list iterator over a linked set.
*
* <p>This class provides a list iterator over a linked hash set. The constructor runs in constant time.
*/
private final class SetIterator implements DoubleListIterator {
/** The entry that will be returned by the next call to {@link java.util.ListIterator#previous()} (or -1 if no previous entry exists). */
int prev = -1;
/** The entry that will be returned by the next call to {@link java.util.ListIterator#next()} (or -1 if no next entry exists). */
int next = -1;
/** The last entry that was returned (or -1 if we did not iterate or used {@link #remove()}). */
int curr = -1;
/** The current index (in the sense of a {@link java.util.ListIterator}). When -1, we do not know the current index.*/
int index = -1;
SetIterator() {
next = first;
index = 0;
}
SetIterator(double from) {
if (( Double.doubleToLongBits(from) == 0 )) {
if (DoubleLinkedOpenHashSet.this.containsNull) {
next = (int) link[n];
prev = n;
return;
}
else throw new NoSuchElementException("The key " + from + " does not belong to this set.");
}
if (( Double.doubleToLongBits(key[last]) == Double.doubleToLongBits(from) )) {
prev = last;
index = size;
return;
}
// The starting point.
final double key[] = DoubleLinkedOpenHashSet.this.key;
int pos = (int)it.unimi.dsi.fastutil.HashCommon.mix( Double.doubleToRawLongBits(from) ) & mask;
// There's always an unused entry.
while(! ( Double.doubleToLongBits(key[pos]) == 0 )) {
if (( Double.doubleToLongBits(key[pos]) == Double.doubleToLongBits(from) )) {
// Note: no valid index known.
next = (int) link[pos];
prev = pos;
return;
}
pos = (pos + 1) & mask;
}
throw new NoSuchElementException("The key " + from + " does not belong to this set.");
}
@Override
public boolean hasNext() { return next != -1; }
@Override
public boolean hasPrevious() { return prev != -1; }
@Override
public double nextDouble() {
if (! hasNext()) throw new NoSuchElementException();
curr = next;
next = (int) link[curr];
prev = curr;
if (index >= 0) index++;
if (ASSERTS) assert curr == n || ! ( Double.doubleToLongBits(key[curr]) == 0 ) : "Position " + curr + " is not used";
return key[curr];
}
@Override
public double previousDouble() {
if (! hasPrevious()) throw new NoSuchElementException();
curr = prev;
prev = (int) ( link[curr] >>> 32 );
next = curr;
if (index >= 0) index--;
return key[curr];
}
@Override
public void forEachRemaining(final java.util.function.DoubleConsumer action) {
final double key[] = DoubleLinkedOpenHashSet.this.key;
final long link[] = DoubleLinkedOpenHashSet.this.link;
while (next != -1) {
curr = next;
next = (int) link[curr];
prev = curr;
if (index >= 0) index++;
if (ASSERTS) assert curr == n || ! ( Double.doubleToLongBits(key[curr]) == 0 ) : "Position " + curr + " is not used";
action.accept(key[curr]);
}
}
private final void ensureIndexKnown() {
if (index >= 0) return;
if (prev == -1) {
index = 0;
return;
}
if (next == -1) {
index = size;
return;
}
int pos = first;
index = 1;
while(pos != prev) {
pos = (int) link[pos];
index++;
}
}
@Override
public int nextIndex() {
ensureIndexKnown();
return index;
}
@Override
public int previousIndex() {
ensureIndexKnown();
return index - 1;
}
@Override
public void remove() {
ensureIndexKnown();
if (curr == -1) throw new IllegalStateException();
if (curr == prev) {
/* If the last operation was a next(), we are removing an entry that precedes
* the current index, and thus we must decrement it. */
index--;
prev = (int) ( link[curr] >>> 32 );
}
else
next = (int) link[curr];
size--;
/* Now we manually fix the pointers. Because of our knowledge of next
* and prev, this is going to be faster than calling fixPointers(). */
if (prev == -1) first = next;
else
link[prev] ^= ( ( link[prev] ^ ( next & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL );
if (next == -1) last = prev;
else
link[next] ^= ( ( link[next] ^ ( ( prev & 0xFFFFFFFFL ) << 32 ) ) & 0xFFFFFFFF00000000L );
int last, slot, pos = curr;
curr = -1;
if (pos == n) {
DoubleLinkedOpenHashSet.this.containsNull = false;
DoubleLinkedOpenHashSet.this.key[n] = (0);
}
else {
double curr;
final double[] key = DoubleLinkedOpenHashSet.this.key;
// We have to horribly duplicate the shiftKeys() code because we need to update next/prev.
for(;;) {
pos = ((last = pos) + 1) & mask;
for(;;) {
if (( Double.doubleToLongBits(curr = key[pos]) == 0 )) {
key[last] = (0);
return;
}
slot = (int)it.unimi.dsi.fastutil.HashCommon.mix( Double.doubleToRawLongBits(curr) ) & mask;
if (last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos) break;
pos = (pos + 1) & mask;
}
key[last] = curr;
if (next == pos) next = last;
if (prev == pos) prev = last;
fixPointers(pos, last);
}
}
}
}
/** Returns a type-specific list iterator on the elements in this set, starting from a given element of the set.
* Please see the class documentation for implementation details.
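*
* <p>For instance (an illustrative sketch), assuming {@code set} contains the element {@code 3.0}:
* <pre>{@code
* DoubleListIterator it = set.iterator(3.0);
* while (it.hasNext()) System.out.println(it.nextDouble()); // elements following 3.0 in iteration order
* }</pre>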
*
* @param from an element to start from.
* @return a type-specific list iterator starting at the given element.
* @throws NoSuchElementException if {@code from} does not belong to the set.
*/
@Override
public DoubleListIterator iterator(double from) {
return new SetIterator(from);
}
/** Returns a type-specific list iterator on the elements in this set, starting from the first element.
* Please see the class documentation for implementation details.
*
* @return a type-specific list iterator starting at the first element.
*/
@Override
public DoubleListIterator iterator() {
return new SetIterator();
}
private static final int SPLITERATOR_CHARACTERISTICS = DoubleSpliterators.SET_SPLITERATOR_CHARACTERISTICS | java.util.Spliterator.ORDERED;
/** {@inheritDoc}
*
* <p>There isn't a way to split efficiently while still preserving order for a linked data structure,
* so this implementation is just backed by the iterator. Thus, this spliterator is not well optimized
* for parallel streams.
*
* <p>Note that, contrary to the specification of {@link java.util.SortedSet}, this spliterator does not
* report {@link java.util.Spliterator#SORTED}. This is because iteration order is based on insertion
* order, not natural ordering.
*/
@Override
public DoubleSpliterator spliterator() {
return DoubleSpliterators.asSpliterator(
iterator(), it.unimi.dsi.fastutil.Size64.sizeOf(this), SPLITERATOR_CHARACTERISTICS);
}
@Override
public void forEach(final java.util.function.DoubleConsumer action) {
int curr;
int next = first;
while (next != -1) {
curr = next;
next = (int) link[curr];
if (ASSERTS) assert curr == n || ! ( Double.doubleToLongBits(key[curr]) == 0 ) : "Position " + curr + " is not used";
action.accept(key[curr]);
}
}
/** Rehashes this set, making the table as small as possible.
*
* <p>This method rehashes the table to the smallest size satisfying the
* load factor. It can be used when the set will not be changed anymore, so
* to optimize access speed and size.
*
* <p>If the table size is already the minimum possible, this method
* does nothing.
*
* @return true if there was enough memory to trim the set.
* @see #trim(int)
*/
public boolean trim() {
return trim(size);
}
/** Rehashes this set if the table is too large.
*
* <p>Let {@code N} be the smallest table size that can hold
* max({@code n}, {@link #size()}) entries, still satisfying the load factor. If the current
* table size is smaller than or equal to {@code N}, this method does
* nothing. Otherwise, it rehashes this set in a table of size {@code N}.
*
* <p>This method is useful when reusing sets. {@linkplain #clear() Clearing a
* set} leaves the table size untouched. If you are reusing a set
* many times, you can call this method with a typical
* size to avoid keeping around a very large table just
* because of a few large transient sets.
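*
* <p>A typical reuse pattern might look like the following sketch (the size {@code 1024} is illustrative):
* <pre>{@code
* set.clear();    // removes all elements, but keeps the current (possibly large) table
* set.trim(1024); // shrinks the table to the smallest size that can hold 1024 elements
* }</pre>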
*
* @param n the threshold for the trimming.
* @return true if there was enough memory to trim the set.
* @see #trim()
*/
public boolean trim(final int n) {
final int l = HashCommon.nextPowerOfTwo((int)Math.ceil(n / f));
if (l >= this.n || size > maxFill(l, f)) return true;
try {
rehash(l);
}
catch(OutOfMemoryError cantDoIt) { return false; }
return true;
}
/** Rehashes the set.
*
* <p>This method implements the basic rehashing strategy, and may be
* overridden by subclasses implementing different rehashing strategies (e.g.,
* disk-based rehashing). However, you should not override this method
* unless you understand the internal workings of this class.
*
* @param newN the new size
*/
protected void rehash(final int newN) {
final double key[] = this.key;
final int mask = newN - 1; // Note that this is used by the hashing macro
final double newKey[] = new double[newN + 1];
int i = first, prev = -1, newPrev = -1, t, pos;
final long link[] = this.link;
final long newLink[] = new long[newN + 1];
first = -1;
for(int j = size; j-- != 0;) {
if (( Double.doubleToLongBits(key[i]) == 0 )) pos = newN;
else {
pos = (int)it.unimi.dsi.fastutil.HashCommon.mix( Double.doubleToRawLongBits(key[i]) ) & mask;
while (! ( Double.doubleToLongBits(newKey[pos]) == 0 )) pos = (pos + 1) & mask;
}
newKey[pos] = key[i];
if (prev != -1) {
newLink[newPrev] ^= ( ( newLink[newPrev] ^ ( pos & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL );
newLink[pos] ^= ( ( newLink[pos] ^ ( ( newPrev & 0xFFFFFFFFL ) << 32 ) ) & 0xFFFFFFFF00000000L );
newPrev = pos;
}
else {
newPrev = first = pos;
// Special case of SET(newLink[pos], -1, -1);
newLink[pos] = -1L;
}
t = i;
i = (int) link[i];
prev = t;
}
this.link = newLink;
this.last = newPrev;
if (newPrev != -1)
// Special case of SET_NEXT(newLink[newPrev], -1);
newLink[newPrev] |= -1 & 0xFFFFFFFFL;
n = newN;
this.mask = mask;
maxFill = maxFill(n, f);
this.key = newKey;
}
/** Returns a deep copy of this set.
*
* <p>This method performs a deep copy of this hash set; the data stored in the
* set, however, is not cloned. Note that this makes a difference only for object keys.
*
* @return a deep copy of this set.
*/
@Override
public DoubleLinkedOpenHashSet clone() {
DoubleLinkedOpenHashSet c;
try {
c = (DoubleLinkedOpenHashSet )super.clone();
}
catch(CloneNotSupportedException cantHappen) {
throw new InternalError();
}
c.key = key.clone();
c.containsNull = containsNull;
c.link = link.clone();
return c;
}
/** Returns a hash code for this set.
*
* <p>This method overrides the generic method provided by the superclass.
* Since {@code equals()} is not overridden, it is important
* that the value returned by this method is the same value as
* the one returned by the overridden method.
*
* @return a hash code for this set.
*/
@Override
public int hashCode() {
int h = 0;
final double[] key = DoubleLinkedOpenHashSet.this.key;
for(int j = realSize(), i = 0; j-- != 0;) {
while(( Double.doubleToLongBits(key[i]) == 0 )) i++;
h += it.unimi.dsi.fastutil.HashCommon.double2int(key[i]);
i++;
}
// Zero / null have hash zero.
return h;
}
private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
final DoubleIterator i = iterator();
s.defaultWriteObject();
for(int j = size; j-- != 0;) s.writeDouble(i.nextDouble());
}
private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
s.defaultReadObject();
n = arraySize(size, f);
maxFill = maxFill(n, f);
mask = n - 1;
final double key[] = this.key = new double[n + 1];
final long link[] = this.link = new long[n + 1];
int prev = -1;
first = last = -1;
double k;
for(int i = size, pos; i-- != 0;) {
k = s.readDouble();
if (( Double.doubleToLongBits(k) == 0 )) {
pos = n;
containsNull = true;
}
else {
if (! ( Double.doubleToLongBits(key[pos = (int)it.unimi.dsi.fastutil.HashCommon.mix( Double.doubleToRawLongBits(k) ) & mask]) == 0 ))
while (! ( Double.doubleToLongBits(key[pos = (pos + 1) & mask]) == 0 ));
}
key[pos] = k;
if (first != -1) {
link[prev] ^= ( ( link[prev] ^ ( pos & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL );
link[pos] ^= ( ( link[pos] ^ ( ( prev & 0xFFFFFFFFL ) << 32 ) ) & 0xFFFFFFFF00000000L );
prev = pos;
}
else {
prev = first = pos;
// Special case of SET_PREV(link[pos], -1);
link[pos] |= (-1L & 0xFFFFFFFFL) << 32;
}
}
last = prev;
if (prev != -1)
// Special case of SET_NEXT(link[prev], -1);
link[prev] |= -1 & 0xFFFFFFFFL;
if (ASSERTS) checkTable();
}
private void checkTable() {}
}