org.springframework.util.ConcurrentLruCache (from the keycloak jar)

/*
 * Copyright 2002-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.util;

import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLongArray;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.atomic.AtomicReferenceArray;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Function;

import org.springframework.lang.Nullable;

/**
 * Simple LRU (Least Recently Used) cache, bounded by a specified cache capacity.
 *
 * <p>This is a simplified, opinionated implementation of an LRU cache
 * for internal use in Spring Framework. It is inspired by
 * ConcurrentLinkedHashMap.
 *
 * <p>Read and write operations are internally recorded in dedicated buffers,
 * then drained at chosen times to avoid contention.
 *
 * @author Brian Clozel
 * @author Ben Manes
 * @since 5.3
 * @param <K> the type of the key used for cache retrieval
 * @param <V> the type of the cached values, does not allow null values
 * @see #get(Object)
 */
@SuppressWarnings({"unchecked"})
public final class ConcurrentLruCache<K, V> {

	private final int capacity;

	private final AtomicInteger currentSize = new AtomicInteger();

	private final ConcurrentMap<K, Node<K, V>> cache;

	private final Function<K, V> generator;

	private final ReadOperations<K, V> readOperations;

	private final WriteOperations writeOperations;

	private final Lock evictionLock = new ReentrantLock();

	/*
	 * Queue that contains all ACTIVE cache entries, ordered with least recently used entries first.
	 * Read and write operations are buffered and periodically processed to reorder the queue.
	 */
	private final EvictionQueue<K, V> evictionQueue = new EvictionQueue<>();

	private final AtomicReference<DrainStatus> drainStatus = new AtomicReference<>(DrainStatus.IDLE);

	/**
	 * Create a new cache instance with the given capacity and generator function.
	 * @param capacity the maximum number of entries in the cache
	 * (0 indicates no caching, always generating a new value)
	 * @param generator a function to generate a new value for a given key
	 */
	public ConcurrentLruCache(int capacity, Function<K, V> generator) {
		this(capacity, generator, 16);
	}

	private ConcurrentLruCache(int capacity, Function<K, V> generator, int concurrencyLevel) {
		Assert.isTrue(capacity >= 0, "Capacity must be >= 0");
		this.capacity = capacity;
		this.cache = new ConcurrentHashMap<>(16, 0.75f, concurrencyLevel);
		this.generator = generator;
		this.readOperations = new ReadOperations<>(this.evictionQueue);
		this.writeOperations = new WriteOperations();
	}

	/**
	 * Retrieve an entry from the cache, potentially triggering generation of the value.
	 * @param key the key to retrieve the entry for
	 * @return the cached or newly generated value
	 */
	public V get(K key) {
		if (this.capacity == 0) {
			return this.generator.apply(key);
		}
		final Node<K, V> node = this.cache.get(key);
		if (node == null) {
			V value = this.generator.apply(key);
			put(key, value);
			return value;
		}
		processRead(node);
		return node.getValue();
	}

	private void put(K key, V value) {
		Assert.notNull(key, "key must not be null");
		Assert.notNull(value, "value must not be null");
		final CacheEntry<V> cacheEntry = new CacheEntry<>(value, CacheEntryState.ACTIVE);
		final Node<K, V> node = new Node<>(key, cacheEntry);
		final Node<K, V> prior = this.cache.putIfAbsent(node.key, node);
		if (prior == null) {
			processWrite(new AddTask(node));
		}
		else {
			processRead(prior);
		}
	}

	private void processRead(Node<K, V> node) {
		boolean drainRequested = this.readOperations.recordRead(node);
		final DrainStatus status = this.drainStatus.get();
		if (status.shouldDrainBuffers(drainRequested)) {
			drainOperations();
		}
	}

	private void processWrite(Runnable task) {
		this.writeOperations.add(task);
		this.drainStatus.lazySet(DrainStatus.REQUIRED);
		drainOperations();
	}

	private void drainOperations() {
		if (this.evictionLock.tryLock()) {
			try {
				this.drainStatus.lazySet(DrainStatus.PROCESSING);
				this.readOperations.drain();
				this.writeOperations.drain();
			}
			finally {
				this.drainStatus.compareAndSet(DrainStatus.PROCESSING, DrainStatus.IDLE);
				this.evictionLock.unlock();
			}
		}
	}

	/**
	 * Return the maximum number of entries in the cache.
	 * @see #size()
	 */
	public int capacity() {
		return this.capacity;
	}

	/**
	 * Return the maximum number of entries in the cache.
	 * @deprecated in favor of {@link #capacity()} as of 6.0.
	 */
	@Deprecated(since = "6.0")
	public int sizeLimit() {
		return this.capacity;
	}

	/**
	 * Return the current size of the cache.
	 * @see #capacity()
	 */
	public int size() {
		return this.cache.size();
	}

	/**
	 * Immediately remove all entries from this cache.
	 */
	public void clear() {
		this.evictionLock.lock();
		try {
			Node<K, V> node;
			while ((node = this.evictionQueue.poll()) != null) {
				this.cache.remove(node.key, node);
				markAsRemoved(node);
			}
			this.readOperations.clear();
			this.writeOperations.drainAll();
		}
		finally {
			this.evictionLock.unlock();
		}
	}

	/*
	 * Transition the node to the {@code removed} state and decrement the current size of the cache.
	 */
	private void markAsRemoved(Node<K, V> node) {
		for (;;) {
			CacheEntry<V> current = node.get();
			CacheEntry<V> removed = new CacheEntry<>(current.value, CacheEntryState.REMOVED);
			if (node.compareAndSet(current, removed)) {
				this.currentSize.lazySet(this.currentSize.get() - 1);
				return;
			}
		}
	}

	/**
	 * Determine whether the given key is present in this cache.
	 * @param key the key to check for
	 * @return {@code true} if the key is present, {@code false} if there was no matching key
	 */
	public boolean contains(K key) {
		return this.cache.containsKey(key);
	}

	/**
	 * Immediately remove the given key and any associated value.
	 * @param key the key to evict the entry for
	 * @return {@code true} if the key was present before,
	 * {@code false} if there was no matching key
	 */
	@Nullable
	public boolean remove(K key) {
		final Node<K, V> node = this.cache.remove(key);
		if (node == null) {
			return false;
		}
		markForRemoval(node);
		processWrite(new RemovalTask(node));
		return true;
	}

	/*
	 * Transition the node from the {@code active} state to the {@code pending removal} state,
	 * if the transition is valid.
	 */
	private void markForRemoval(Node<K, V> node) {
		for (;;) {
			final CacheEntry<V> current = node.get();
			if (!current.isActive()) {
				return;
			}
			final CacheEntry<V> pendingRemoval = new CacheEntry<>(current.value, CacheEntryState.PENDING_REMOVAL);
			if (node.compareAndSet(current, pendingRemoval)) {
				return;
			}
		}
	}

	/**
	 * Write operation recorded when a new entry is added to the cache.
	 */
	private final class AddTask implements Runnable {

		final Node<K, V> node;

		AddTask(Node<K, V> node) {
			this.node = node;
		}

		@Override
		public void run() {
			currentSize.lazySet(currentSize.get() + 1);
			if (this.node.get().isActive()) {
				evictionQueue.add(this.node);
				evictEntries();
			}
		}

		private void evictEntries() {
			while (currentSize.get() > capacity) {
				final Node<K, V> node = evictionQueue.poll();
				if (node == null) {
					return;
				}
				cache.remove(node.key, node);
				markAsRemoved(node);
			}
		}
	}

	/**
	 * Write operation recorded when an entry is removed from the cache.
	 */
	private final class RemovalTask implements Runnable {

		final Node<K, V> node;

		RemovalTask(Node<K, V> node) {
			this.node = node;
		}

		@Override
		public void run() {
			evictionQueue.remove(this.node);
			markAsRemoved(this.node);
		}
	}

	/*
	 * Draining status for the read/write buffers.
	 */
	private enum DrainStatus {

		/*
		 * No drain operation currently running.
		 */
		IDLE {
			@Override
			boolean shouldDrainBuffers(boolean delayable) {
				return !delayable;
			}
		},

		/*
		 * A drain operation is required due to a pending write modification.
		 */
		REQUIRED {
			@Override
			boolean shouldDrainBuffers(boolean delayable) {
				return true;
			}
		},

		/*
		 * A drain operation is in progress.
		 */
		PROCESSING {
			@Override
			boolean shouldDrainBuffers(boolean delayable) {
				return false;
			}
		};

		/**
		 * Determine whether the buffers should be drained.
		 * @param delayable if a drain should be delayed until required
		 * @return if a drain should be attempted
		 */
		abstract boolean shouldDrainBuffers(boolean delayable);
	}

	private enum CacheEntryState {
		ACTIVE, PENDING_REMOVAL, REMOVED
	}

	private record CacheEntry<V>(V value, CacheEntryState state) {

		boolean isActive() {
			return this.state == CacheEntryState.ACTIVE;
		}
	}

	private static final class ReadOperations<K, V> {

		private static final int BUFFER_COUNT = detectNumberOfBuffers();

		private static int detectNumberOfBuffers() {
			int availableProcessors = Runtime.getRuntime().availableProcessors();
			int nextPowerOfTwo = 1 << (Integer.SIZE - Integer.numberOfLeadingZeros(availableProcessors - 1));
			return Math.min(4, nextPowerOfTwo);
		}

		private static final int BUFFERS_MASK = BUFFER_COUNT - 1;

		private static final int MAX_PENDING_OPERATIONS = 32;

		private static final int MAX_DRAIN_COUNT = 2 * MAX_PENDING_OPERATIONS;

		private static final int BUFFER_SIZE = 2 * MAX_DRAIN_COUNT;

		private static final int BUFFER_INDEX_MASK = BUFFER_SIZE - 1;

		/*
		 * Number of operations recorded, for each buffer
		 */
		private final AtomicLongArray recordedCount = new AtomicLongArray(BUFFER_COUNT);

		/*
		 * Number of operations read, for each buffer
		 */
		private final long[] readCount = new long[BUFFER_COUNT];

		/*
		 * Number of operations processed, for each buffer
		 */
		private final AtomicLongArray processedCount = new AtomicLongArray(BUFFER_COUNT);

		@SuppressWarnings("rawtypes")
		private final AtomicReferenceArray<Node<K, V>>[] buffers = new AtomicReferenceArray[BUFFER_COUNT];

		private final EvictionQueue<K, V> evictionQueue;

		ReadOperations(EvictionQueue<K, V> evictionQueue) {
			this.evictionQueue = evictionQueue;
			for (int i = 0; i < BUFFER_COUNT; i++) {
				this.buffers[i] = new AtomicReferenceArray<>(BUFFER_SIZE);
			}
		}

		@SuppressWarnings("deprecation")  // for Thread.getId() on JDK 19
		private static int getBufferIndex() {
			return ((int) Thread.currentThread().getId()) & BUFFERS_MASK;
		}

		boolean recordRead(Node<K, V> node) {
			int bufferIndex = getBufferIndex();
			final long writeCount = this.recordedCount.get(bufferIndex);
			this.recordedCount.lazySet(bufferIndex, writeCount + 1);
			final int index = (int) (writeCount & BUFFER_INDEX_MASK);
			this.buffers[bufferIndex].lazySet(index, node);
			final long pending = (writeCount - this.processedCount.get(bufferIndex));
			return (pending < MAX_PENDING_OPERATIONS);
		}

		@SuppressWarnings("deprecation")  // for Thread.getId() on JDK 19
		void drain() {
			final int start = (int) Thread.currentThread().getId();
			final int end = start + BUFFER_COUNT;
			for (int i = start; i < end; i++) {
				drainReadBuffer(i & BUFFERS_MASK);
			}
		}

		void clear() {
			for (int i = 0; i < BUFFER_COUNT; i++) {
				AtomicReferenceArray<Node<K, V>> buffer = this.buffers[i];
				for (int j = 0; j < BUFFER_SIZE; j++) {
					buffer.lazySet(j, null);
				}
			}
		}

		private void drainReadBuffer(int bufferIndex) {
			final long writeCount = this.recordedCount.get(bufferIndex);
			for (int i = 0; i < MAX_DRAIN_COUNT; i++) {
				final int index = (int) (this.readCount[bufferIndex] & BUFFER_INDEX_MASK);
				final AtomicReferenceArray<Node<K, V>> buffer = this.buffers[bufferIndex];
				final Node<K, V> node = buffer.get(index);
				if (node == null) {
					break;
				}
				buffer.lazySet(index, null);
				this.evictionQueue.moveToBack(node);
				this.readCount[bufferIndex]++;
			}
			this.processedCount.lazySet(bufferIndex, writeCount);
		}
	}

	private static final class WriteOperations {

		private static final int DRAIN_THRESHOLD = 16;

		private final Queue<Runnable> operations = new ConcurrentLinkedQueue<>();

		public void add(Runnable task) {
			this.operations.add(task);
		}

		public void drain() {
			for (int i = 0; i < DRAIN_THRESHOLD; i++) {
				final Runnable task = this.operations.poll();
				if (task == null) {
					break;
				}
				task.run();
			}
		}

		public void drainAll() {
			Runnable task;
			while ((task = this.operations.poll()) != null) {
				task.run();
			}
		}
	}

	@SuppressWarnings("serial")
	private static final class Node<K, V> extends AtomicReference<CacheEntry<V>> {

		final K key;

		@Nullable
		Node<K, V> prev;

		@Nullable
		Node<K, V> next;

		Node(K key, CacheEntry<V> cacheEntry) {
			super(cacheEntry);
			this.key = key;
		}

		@Nullable
		public Node<K, V> getPrevious() {
			return this.prev;
		}

		public void setPrevious(@Nullable Node<K, V> prev) {
			this.prev = prev;
		}

		@Nullable
		public Node<K, V> getNext() {
			return this.next;
		}

		public void setNext(@Nullable Node<K, V> next) {
			this.next = next;
		}

		V getValue() {
			return get().value;
		}
	}

	private static final class EvictionQueue<K, V> {

		@Nullable
		Node<K, V> first;

		@Nullable
		Node<K, V> last;

		@Nullable
		Node<K, V> poll() {
			if (this.first == null) {
				return null;
			}
			final Node<K, V> f = this.first;
			final Node<K, V> next = f.getNext();
			f.setNext(null);
			this.first = next;
			if (next == null) {
				this.last = null;
			}
			else {
				next.setPrevious(null);
			}
			return f;
		}

		void add(Node<K, V> e) {
			if (contains(e)) {
				return;
			}
			linkLast(e);
		}

		private boolean contains(Node<K, V> e) {
			return (e.getPrevious() != null) || (e.getNext() != null) || (e == this.first);
		}

		private void linkLast(final Node<K, V> e) {
			final Node<K, V> l = this.last;
			this.last = e;
			if (l == null) {
				this.first = e;
			}
			else {
				l.setNext(e);
				e.setPrevious(l);
			}
		}

		private void unlink(Node<K, V> e) {
			final Node<K, V> prev = e.getPrevious();
			final Node<K, V> next = e.getNext();
			if (prev == null) {
				this.first = next;
			}
			else {
				prev.setNext(next);
				e.setPrevious(null);
			}
			if (next == null) {
				this.last = prev;
			}
			else {
				next.setPrevious(prev);
				e.setNext(null);
			}
		}

		void moveToBack(Node<K, V> e) {
			if (contains(e) && e != this.last) {
				unlink(e);
				linkLast(e);
			}
		}

		void remove(Node<K, V> e) {
			if (contains(e)) {
				unlink(e);
			}
		}
	}

}
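
For orientation, here is a minimal usage sketch of the class above, assuming it is available on the classpath as org.springframework.util.ConcurrentLruCache. The cache is constructed with a fixed capacity and a generator function; get(key) returns the cached value or invokes the generator on a miss and stores the result. The demo class name, keys, and generator below are illustrative and not part of the listing.

import java.util.Locale;

import org.springframework.util.ConcurrentLruCache;

// Hypothetical demo class, not part of the Spring Framework source above.
public class ConcurrentLruCacheDemo {

	public static void main(String[] args) {
		// Capacity of 2; the generator runs only on a cache miss.
		ConcurrentLruCache<String, String> cache =
				new ConcurrentLruCache<>(2, key -> key.toUpperCase(Locale.ROOT));

		System.out.println(cache.get("alpha"));      // miss: generates and caches "ALPHA"
		System.out.println(cache.get("beta"));       // miss: generates and caches "BETA"
		System.out.println(cache.contains("alpha")); // true: both entries fit within the capacity

		cache.get("gamma");                          // third entry: the least recently used key becomes eligible for eviction
		System.out.println(cache.size());            // expected to stay at or below the capacity once buffered operations are drained

		cache.remove("beta");                        // immediate removal of a single key
		cache.clear();                               // immediate removal of all entries
	}
}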




