
com.carrotsearch.hppcrt.sets.LongHashSet Maven / Gradle / Ivy


High Performance Primitive Collections Realtime (HPPC-RT, a fork of HPPC from Carrotsearch): fundamental data structures (maps, sets, lists, queues, heaps, sorts) generated for combinations of object and primitive types to conserve JVM memory and speed up execution. The Realtime fork extends the existing collections by removing dynamic allocations at runtime, aiming for low-variance execution times regardless of the input.
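For orientation, here is a minimal usage sketch of the LongHashSet class listed below. It only relies on methods visible in this source (from, add, contains, remove, size, forEach); the example class name and the sample values are illustrative and not part of the library.

import com.carrotsearch.hppcrt.procedures.LongProcedure;
import com.carrotsearch.hppcrt.sets.LongHashSet;

public class LongHashSetExample {
    public static void main(final String[] args) {
        // Primitive long keys are stored directly, with no boxing to java.lang.Long.
        final LongHashSet ids = LongHashSet.from(1L, 2L, 3L);
        ids.add(42L);

        System.out.println(ids.contains(42L)); // true
        System.out.println(ids.size());        // 4

        // Removal uses backward shifting of conflicting keys (no tombstones).
        ids.remove(2L);

        // Callback-style traversal; iteration order is unspecified for a hash set.
        ids.forEach(new LongProcedure() {
            @Override
            public void apply(final long value) {
                System.out.println(value);
            }
        });
    }
}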

package com.carrotsearch.hppcrt.sets;

import com.carrotsearch.hppcrt.*;
import com.carrotsearch.hppcrt.cursors.*;
import com.carrotsearch.hppcrt.predicates.*;
import com.carrotsearch.hppcrt.procedures.*;
import com.carrotsearch.hppcrt.hash.*;

  
  
  
// If RH is defined, RobinHood Hashing is in effect
  

/**
 * A hash set of longs, implemented using open
 * addressing with linear probing for collision resolution.
 * <p>
 * The internal buffers of this implementation ({@link #keys}, etc...)
 * are always allocated to the nearest size that is a power of two. When
 * the capacity exceeds the given load factor, the buffer size is doubled.
 */
@javax.annotation.Generated(
        date = "2017-07-11T19:16:33+0200",
        value = "KTypeHashSet.java")
public class LongHashSet
        extends AbstractLongCollection
        implements LongLookupContainer, LongSet, Cloneable
{
    /**
     * Hash-indexed array holding all set entries.
     * <p>
     * Direct set iteration: iterate {keys[i]} for i in [0; keys.length[ where keys[i] != 0/null,
     * then also {0/null} is in the set if {@link #allocatedDefaultKey} = true.
     */
    public long[] keys;

    /**
     * True if key = 0/null is in the map.
     */
    public boolean allocatedDefaultKey = false;

    /**
     * Cached number of assigned slots in {@link #keys}.
     */
    protected int assigned;

    /**
     * The load factor for this map (fraction of allocated slots
     * before the buffers must be rehashed or reallocated).
     */
    protected final double loadFactor;

    /**
     * Resize buffers when {@link #keys} hits this value.
     */
    private int resizeAt;

    /**
     * Per-instance perturbation
     * introduced in rehashing to create a unique key distribution.
     */
    private final int perturbation = Containers.randomSeed32();

    /**
     * Default constructor: Creates a hash set with the default capacity of {@link Containers#DEFAULT_EXPECTED_ELEMENTS},
     * load factor of {@link HashContainers#DEFAULT_LOAD_FACTOR}.
     */
    public LongHashSet() {
        this(Containers.DEFAULT_EXPECTED_ELEMENTS, HashContainers.DEFAULT_LOAD_FACTOR);
    }

    /**
     * Creates a hash set with the given capacity,
     * load factor of {@link HashContainers#DEFAULT_LOAD_FACTOR}.
     */
    public LongHashSet(final int initialCapacity) {
        this(initialCapacity, HashContainers.DEFAULT_LOAD_FACTOR);
    }

    /**
     * Creates a hash set with the given capacity and load factor.
     */
    public LongHashSet(final int initialCapacity, final double loadFactor) {
        this.loadFactor = loadFactor;
        //take the load factor into account to guarantee no reallocations before reaching initialCapacity.
        allocateBuffers(HashContainers.minBufferSize(initialCapacity, loadFactor));
    }

    /**
     * Creates a hash set from elements of another container. Default load factor is used.
     */
    public LongHashSet(final LongContainer container) {
        this(container.size());
        addAll(container);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean add(long key) {
        if (key == 0L) {
            if (this.allocatedDefaultKey) {
                return false;
            }
            this.allocatedDefaultKey = true;
            return true;
        }

        final int mask = this.keys.length - 1;
        final long[] keys = this.keys;

        int slot = BitMixer.mix(key, this.perturbation) & mask;
        long existing;

        while (!((existing = keys[slot]) == 0L)) {
            if (key == existing) {
                return false;
            }
            slot = (slot + 1) & mask;
        }

        // Check if we need to grow. If so, reallocate new data,
        // fill in the last element and rehash.
        if (this.assigned == this.resizeAt) {
            expandAndAdd(key, slot);
        } else {
            this.assigned++;
            keys[slot] = key;
        }
        return true;
    }

    /**
     * Adds two elements to the set.
     */
    public int add(final long e1, final long e2) {
        int count = 0;
        if (add(e1)) {
            count++;
        }
        if (add(e2)) {
            count++;
        }
        return count;
    }

    /**
     * Vararg-signature method for adding elements to this set.
     * <p>
     * This method is handy, but costly if used in tight loops (anonymous
     * array passing).
     *
     * @return Returns the number of elements that were added to the set
     *         (were not present in the set).
     */
    public int add(final long... elements) {
        int count = 0;
        for (final long e : elements) {
            if (add(e)) {
                count++;
            }
        }
        return count;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int addAll(final LongContainer container) {
        return addAll((Iterable<? extends LongCursor>) container);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int addAll(final Iterable<? extends LongCursor> iterable) {
        int count = 0;
        for (final LongCursor cursor : iterable) {
            if (add(cursor.value)) {
                count++;
            }
        }
        return count;
    }

    /**
     * Expand the internal storage buffers (capacity) or rehash current
     * keys and values if there are a lot of deleted slots.
     */
    private void expandAndAdd(final long pendingKey, final int freeSlot) {
        assert this.assigned == this.resizeAt;

        //default sentinel value is never in the keys[] array, so never trigger reallocs
        assert !(pendingKey == 0L);

        // Try to allocate new buffers first. If we OOM, it'll be now without
        // leaving the data structure in an inconsistent state.
        final long[] oldKeys = this.keys;

        allocateBuffers(HashContainers.nextBufferSize(this.keys.length, this.assigned, this.loadFactor));

        // We have succeeded at allocating new data so insert the pending key/value at
        // the free slot in the old arrays before rehashing.
        this.assigned++;
        oldKeys[freeSlot] = pendingKey;

        //Variables for adding
        final int mask = this.keys.length - 1;

        long key = 0L;
        //adding phase
        int slot = -1;

        final long[] keys = this.keys;

        //iterate all the old arrays to add in the newly allocated buffers
        //It is important to iterate backwards to minimize the conflict chain length !
        final int perturb = this.perturbation;

        for (int i = oldKeys.length; --i >= 0;) {
            //only consider non-empty slots, of course
            if (!((key = oldKeys[i]) == 0L)) {
                slot = BitMixer.mix(key, perturb) & mask;

                //similar to add(), except all inserted keys are known to be unique.
                while (!(keys[slot] == 0L)) {
                    slot = (slot + 1) & mask;
                } //end while

                //place it at that position
                keys[slot] = key;
            }
        }
    }

    /**
     * Allocate internal buffers for a given capacity.
     *
     * @param capacity New capacity (must be a power of two).
     */
    @SuppressWarnings("boxing")
    private void allocateBuffers(final int capacity) {
        try {
            final long[] keys = new long[capacity];

            this.keys = keys;

            //allocate so that there is at least one slot that remains allocated = false
            //this is compulsory to guarantee proper stop in searching loops
            this.resizeAt = HashContainers.expandAtCount(capacity, this.loadFactor);
        } catch (final OutOfMemoryError e) {
            throw new BufferAllocationException(
                    "Not enough memory to allocate buffers to grow from %d -> %d elements",
                    e,
                    (this.keys == null) ? 0 : this.keys.length,
                    capacity);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int removeAll(final long key) {
        return remove(key) ? 1 : 0;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean remove(final long key) {
        if (key == 0L) {
            if (this.allocatedDefaultKey) {
                this.allocatedDefaultKey = false;
                return true;
            }
            return false;
        }

        final int mask = this.keys.length - 1;
        final long[] keys = this.keys;

        int slot = BitMixer.mix(key, this.perturbation) & mask;
        long existing;

        while (!((existing = keys[slot]) == 0L)) {
            if (key == existing) {
                shiftConflictingKeys(slot);
                return true;
            }
            slot = (slot + 1) & mask;
        } //end while true

        return false;
    }

    /**
     * Shift all the slot-conflicting keys allocated to (and including) slot.
     */
    private void shiftConflictingKeys(int gapSlot) {
        final int mask = this.keys.length - 1;
        final long[] keys = this.keys;
        final int perturb = this.perturbation;

        // Perform shifts of conflicting keys to fill in the gap.
        int distance = 0;

        while (true) {
            final int slot = (gapSlot + (++distance)) & mask;

            final long existing = keys[slot];

            if (existing == 0L) {
                break;
            }

            final int idealSlotModMask = BitMixer.mix(existing, perturb) & mask;

            //original HPPC code: shift = (slot - idealSlot) & mask;
            //equivalent to shift = (slot & mask - idealSlot & mask) & mask;
            //since slot and idealSlotModMask are already folded, we have :
            final int shift = (slot - idealSlotModMask) & mask;

            if (shift >= distance) {
                // Entry at this position was originally at or before the gap slot.
                // Move the conflict-shifted entry to the gap's position and repeat the procedure
                // for any entries to the right of the current position, treating it
                // as the new gap.
                keys[gapSlot] = existing;

                gapSlot = slot;
                distance = 0;
            }
        } //end while

        // Mark the last found gap slot without a conflict as empty.
        keys[gapSlot] = 0L;

        this.assigned--;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean contains(final long key) {
        if (key == 0L) {
            return this.allocatedDefaultKey;
        }

        final int mask = this.keys.length - 1;
        final long[] keys = this.keys;

        int slot = BitMixer.mix(key, this.perturbation) & mask;
        long existing;

        while (!((existing = keys[slot]) == 0L)) {
            if (key == existing) {
                return true;
            }
            slot = (slot + 1) & mask;
        } //end while true

        return false;
    }

    /**
     * {@inheritDoc}
     * <p>
     * Does not release internal buffers.
     */
    @Override
    public void clear() {
        this.assigned = 0;

        // States are always cleared.
        this.allocatedDefaultKey = false;

        //Faster than Arrays.fill(keys, null); // Help the GC.
        LongArrays.blankArray(this.keys, 0, this.keys.length);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int size() {
        return this.assigned + (this.allocatedDefaultKey ? 1 : 0);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int capacity() {
        return this.resizeAt;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int hashCode() {
        int h = 0;

        //allocated default key has hash = 0
        final long[] keys = this.keys;

        for (int i = keys.length; --i >= 0;) {
            long existing;
            if (!((existing = keys[i]) == 0L)) {
                h += BitMixer.mix(existing);
            }
        }

        return h;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean equals(final Object obj) {
        if (obj != null) {
            if (obj == this) {
                return true;
            }

            //must be of the same class, subclasses are not comparable
            if (obj.getClass() != this.getClass()) {
                return false;
            }

            @SuppressWarnings("unchecked")
            final LongSet other = (LongSet) obj;

            //must be of the same size
            if (other.size() != this.size()) {
                return false;
            }

            final EntryIterator it = this.iterator();

            while (it.hasNext()) {
                if (!other.contains(it.next().value)) {
                    //recycle
                    it.release();
                    return false;
                }
            }

            return true;
        }
        return false;
    }

    /**
     * An iterator implementation for {@link #iterator}.
     * Holds a LongCursor returning (value, index) = (long value, index the position in {@link LongHashSet#keys},
     * or keys.length for key = 0/null.)
     */
    public final class EntryIterator extends AbstractIterator<LongCursor>
    {
        public final LongCursor cursor;

        public EntryIterator() {
            this.cursor = new LongCursor();
            this.cursor.index = -2;
        }

        /**
         * Iterate backwards w.r.t the buffer, to
         * minimize collision chains when filling another hash container (ex. with putAll())
         */
        @Override
        protected LongCursor fetch() {
            if (this.cursor.index == LongHashSet.this.keys.length + 1) {
                if (LongHashSet.this.allocatedDefaultKey) {
                    this.cursor.index = LongHashSet.this.keys.length;
                    this.cursor.value = 0L;

                    return this.cursor;
                }

                //no value associated with the default key, continue iteration...
                this.cursor.index = LongHashSet.this.keys.length;
            }

            int i = this.cursor.index - 1;

            while (i >= 0 && LongHashSet.this.keys[i] == 0L) {
                i--;
            }

            if (i == -1) {
                return done();
            }

            this.cursor.index = i;
            this.cursor.value = LongHashSet.this.keys[i];

            return this.cursor;
        }
    }

    /**
     * internal pool of EntryIterator
     */
    protected final IteratorPool<LongCursor, EntryIterator> entryIteratorPool =
            new IteratorPool<LongCursor, EntryIterator>(new ObjectFactory<EntryIterator>() {

                @Override
                public EntryIterator create() {
                    return new EntryIterator();
                }

                @Override
                public void initialize(final EntryIterator obj) {
                    obj.cursor.index = LongHashSet.this.keys.length + 1;
                }

                @Override
                public void reset(final EntryIterator obj) {
                    // nothing to reset
                }
            });

    /**
     * {@inheritDoc}
     */
    @Override
    public EntryIterator iterator() {
        //return new EntryIterator();
        return this.entryIteratorPool.borrow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public <T extends LongProcedure> T forEach(final T procedure) {
        if (this.allocatedDefaultKey) {
            procedure.apply(0L);
        }

        final long[] keys = this.keys;

        //Iterate in reverse for side-stepping the longest conflict chain
        //in another hash, in case apply() is actually used to fill another hash container.
        for (int i = keys.length - 1; i >= 0; i--) {
            long existing;
            if (!((existing = keys[i]) == 0L)) {
                procedure.apply(existing);
            }
        }

        return procedure;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public long[] toArray(final long[] target) {
        int count = 0;

        if (this.allocatedDefaultKey) {
            target[count++] = 0L;
        }

        final long[] keys = this.keys;

        for (int i = 0; i < keys.length; i++) {
            long existing;
            if (!((existing = keys[i]) == 0L)) {
                target[count++] = existing;
            }
        }

        assert count == this.size();

        return target;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public LongHashSet clone() {
        //clone to size() to prevent eventual exponential growth
        final LongHashSet cloned = new LongHashSet(this.size(), this.loadFactor);

        //We must NOT clone, because of the independent perturbation seeds
        cloned.addAll(this);

        return cloned;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public <T extends LongPredicate> T forEach(final T predicate) {
        if (this.allocatedDefaultKey) {
            if (!predicate.apply(0L)) {
                return predicate;
            }
        }

        final long[] keys = this.keys;

        //Iterate in reverse for side-stepping the longest conflict chain
        //in another hash, in case apply() is actually used to fill another hash container.
        for (int i = keys.length - 1; i >= 0; i--) {
            long existing;
            if (!((existing = keys[i]) == 0L)) {
                if (!predicate.apply(existing)) {
                    break;
                }
            }
        }

        return predicate;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int removeAll(final LongPredicate predicate) {
        final int before = this.size();

        if (this.allocatedDefaultKey) {
            if (predicate.apply(0L)) {
                this.allocatedDefaultKey = false;
            }
        }

        final long[] keys = this.keys;

        for (int i = 0; i < keys.length;) {
            long existing;
            if (!((existing = keys[i]) == 0L) && predicate.apply(existing)) {
                shiftConflictingKeys(i);
                // Shift, do not increment slot.
            } else {
                i++;
            }
        }

        return before - this.size();
    }

    /**
     * Create a set from a variable number of arguments or an array of long.
     */
    public static LongHashSet from(final long... elements) {
        final LongHashSet set = new LongHashSet(elements.length);
        set.add(elements);
        return set;
    }

    /**
     * Create a set from elements of another container.
     */
    public static LongHashSet from(final LongContainer container) {
        return new LongHashSet(container);
    }

    /**
     * Create a new hash set with default parameters (shortcut
     * instead of using a constructor).
     */
    public static LongHashSet newInstance() {
        return new LongHashSet();
    }

    /**
     * Returns a new object of this class with no need to declare generic type (shortcut
     * instead of using a constructor).
     */
    public static LongHashSet newInstance(final int initialCapacity, final double loadFactor) {
        return new LongHashSet(initialCapacity, loadFactor);
    }

    //Test for existence in template
}
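As a second hedged sketch, the snippet below shows cursor-based iteration and predicate-based bulk removal against the class above. It assumes, as elsewhere in HPPC-RT, that LongContainer is Iterable over LongCursor (which is what makes the enhanced for loop compile) and that LongPredicate is a single-method interface usable as a lambda; the class name, variable names, and values are illustrative.

import com.carrotsearch.hppcrt.cursors.LongCursor;
import com.carrotsearch.hppcrt.sets.LongHashSet;

public class LongHashSetIterationExample {
    public static void main(final String[] args) {
        final LongHashSet set = LongHashSet.newInstance();
        set.add(10L, 20L);
        set.add(0L); // the default key 0 is tracked by allocatedDefaultKey, not stored in keys[]

        // iterator() borrows an EntryIterator from the internal pool,
        // so steady-state iteration performs no allocation.
        for (final LongCursor cursor : set) {
            System.out.println("value = " + cursor.value + " at slot " + cursor.index);
        }

        // Bulk removal through a predicate; matching slots are closed by shiftConflictingKeys().
        set.removeAll(value -> value < 15L);

        System.out.println(set.size()); // 1, only 20 remains
    }
}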



