package org.conqat.lib.commons.concurrent;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedTransferQueue;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.checkerframework.checker.nullness.qual.NonNull;

/**
 * Factory class for more {@link ExecutorService}s.
 */
public class MoreExecutors {

	private static final int DEFAULT_KEEP_ALIVE_TIME = 60;

	private static final TimeUnit DEFAULT_KEEP_ALIVE_UNIT = TimeUnit.SECONDS;

	/**
	 * Creates a new {@link ExecutorService} that executes each submitted task using one of possibly
	 * several pooled threads. Threads are kept idle for {@value #DEFAULT_KEEP_ALIVE_TIME}
	 * {@value #DEFAULT_KEEP_ALIVE_UNIT}, before they are terminated.
	 * <p>
	 * Thread pools address two different problems: they usually provide improved performance when
	 * executing large numbers of asynchronous tasks, due to reduced per-task invocation overhead, and
	 * they provide a means of bounding and managing the resources, including threads, consumed when
	 * executing a collection of tasks.
	 * <p>
	 * In contrast to the default {@link ThreadPoolExecutor}, this implementation creates new threads
	 * when all current threads are busy (and #threads < {@code maximumPoolSize}), while still using an
	 * unbounded queue.
	 *
	 * @param corePoolSize
	 *            the number of threads to keep in the pool, even if they are idle
	 * @param maximumPoolSize
	 *            the maximum number of threads to allow in the pool
	 * @throws IllegalArgumentException
	 *             if one of the following holds:
	 *             <ul>
	 *             <li>{@code corePoolSize < 0}</li>
	 *             <li>{@code maximumPoolSize <= 0}</li>
	 *             <li>{@code maximumPoolSize < corePoolSize}</li>
	 *             </ul>
	 * @see Stack Overflow answer
	 */
	public static ExecutorService newCachedThreadPool(int corePoolSize, int maximumPoolSize) {
		return newCachedThreadPool(corePoolSize, maximumPoolSize, DEFAULT_KEEP_ALIVE_TIME, DEFAULT_KEEP_ALIVE_UNIT);
	}

	/**
	 * Creates a new {@link ExecutorService} that executes each submitted task using one of possibly
	 * several pooled threads.
	 * <p>
	 * Thread pools address two different problems: they usually provide improved performance when
	 * executing large numbers of asynchronous tasks, due to reduced per-task invocation overhead, and
	 * they provide a means of bounding and managing the resources, including threads, consumed when
	 * executing a collection of tasks.
	 * <p>
	 * In contrast to the default {@link ThreadPoolExecutor}, this implementation creates new threads
	 * when all current threads are busy (and #threads < {@code maximumPoolSize}), while still using an
	 * unbounded queue.
	 *
	 * @param corePoolSize
	 *            the number of threads to keep in the pool, even if they are idle
	 * @param maximumPoolSize
	 *            the maximum number of threads to allow in the pool
	 * @param keepAliveTime
	 *            when the number of threads is greater than the core, this is the maximum time that
	 *            excess idle threads will wait for new tasks before terminating.
	 * @param unit
	 *            the time unit for the {@code keepAliveTime} argument
	 * @throws IllegalArgumentException
	 *             if one of the following holds:
	 *             <ul>
	 *             <li>{@code corePoolSize < 0}</li>
	 *             <li>{@code keepAliveTime < 0}</li>
	 *             <li>{@code maximumPoolSize <= 0}</li>
	 *             <li>{@code maximumPoolSize < corePoolSize}</li>
	 *             </ul>
	 * @see Stack Overflow answer
	 */
	public static ExecutorService newCachedThreadPool(int corePoolSize, int maximumPoolSize, int keepAliveTime,
			TimeUnit unit) {
		return newCachedThreadPool(corePoolSize, maximumPoolSize, keepAliveTime, unit,
				Executors.defaultThreadFactory());
	}

	/**
	 * Creates a new {@link ExecutorService} that executes each submitted task using one of possibly
	 * several pooled threads. Threads are kept idle for {@value #DEFAULT_KEEP_ALIVE_TIME}
	 * {@value #DEFAULT_KEEP_ALIVE_UNIT}, before they are terminated.
	 * <p>
	 * Thread pools address two different problems: they usually provide improved performance when
	 * executing large numbers of asynchronous tasks, due to reduced per-task invocation overhead, and
	 * they provide a means of bounding and managing the resources, including threads, consumed when
	 * executing a collection of tasks.
	 * <p>
	 * In contrast to the default {@link ThreadPoolExecutor}, this implementation creates new threads
	 * when all current threads are busy (and #threads < {@code maximumPoolSize}), while still using an
	 * unbounded queue.
	 *
	 * @param corePoolSize
	 *            the number of threads to keep in the pool, even if they are idle
	 * @param maximumPoolSize
	 *            the maximum number of threads to allow in the pool
	 * @param threadFactory
	 *            the factory to use when the executor creates a new thread
	 * @throws IllegalArgumentException
	 *             if one of the following holds:
	 *             <ul>
	 *             <li>{@code corePoolSize < 0}</li>
	 *             <li>{@code maximumPoolSize <= 0}</li>
	 *             <li>{@code maximumPoolSize < corePoolSize}</li>
	 *             </ul>
	 * @throws NullPointerException
	 *             if {@code threadFactory} is null
	 * @see Stack Overflow answer
	 */
	public static ExecutorService newCachedThreadPool(int corePoolSize, int maximumPoolSize,
			ThreadFactory threadFactory) {
		return newCachedThreadPool(corePoolSize, maximumPoolSize, DEFAULT_KEEP_ALIVE_TIME, DEFAULT_KEEP_ALIVE_UNIT,
				threadFactory);
	}

	/**
	 * Creates a new {@link ExecutorService} that executes each submitted task using one of possibly
	 * several pooled threads.
	 * <p>
	 * Thread pools address two different problems: they usually provide improved performance when
	 * executing large numbers of asynchronous tasks, due to reduced per-task invocation overhead, and
	 * they provide a means of bounding and managing the resources, including threads, consumed when
	 * executing a collection of tasks.
	 * <p>
	 * In contrast to the default {@link ThreadPoolExecutor}, this implementation creates new threads
	 * when all current threads are busy (and #threads < {@code maximumPoolSize}), while still using an
	 * unbounded queue.
	 *
	 * @param corePoolSize
	 *            the number of threads to keep in the pool, even if they are idle
	 * @param maximumPoolSize
	 *            the maximum number of threads to allow in the pool
	 * @param keepAliveTime
	 *            when the number of threads is greater than the core, this is the maximum time that
	 *            excess idle threads will wait for new tasks before terminating.
	 * @param unit
	 *            the time unit for the {@code keepAliveTime} argument
	 * @param threadFactory
	 *            the factory to use when the executor creates a new thread
	 * @throws IllegalArgumentException
	 *             if one of the following holds:
	 *             <ul>
	 *             <li>{@code corePoolSize < 0}</li>
	 *             <li>{@code keepAliveTime < 0}</li>
	 *             <li>{@code maximumPoolSize <= 0}</li>
	 *             <li>{@code maximumPoolSize < corePoolSize}</li>
	 *             </ul>
	 * @throws NullPointerException
	 *             if {@code threadFactory} is null
	 * @see Stack Overflow answer
	 */
	public static ExecutorService newCachedThreadPool(int corePoolSize, int maximumPoolSize, int keepAliveTime,
			TimeUnit unit, ThreadFactory threadFactory) {
		LinkedTransferQueue<Runnable> queue = new LinkedTransferQueue<Runnable>() {

			private static final long serialVersionUID = -5211474297281157743L;

			@Override
			public boolean offer(@NonNull Runnable runnable) {
				// Try to transfer the task to a currently idle thread.
				// Returns false if no idle thread is available, which will either spawn a new
				// thread (if poolSize < maximumPoolSize) or reject the task.
				// If a task is rejected, our own RejectedExecutionHandler (see next statement)
				// simply puts it into the queue, so that the next idle thread can execute it.
				return tryTransfer(runnable);
			}
		};
		RejectedExecutionHandler rejectedExecutionHandler = (r, executor) -> queue.put(r);
		return new ThreadPoolExecutor(corePoolSize, maximumPoolSize, keepAliveTime, unit, queue, threadFactory,
				rejectedExecutionHandler);
	}
}
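For context: Executors.newCachedThreadPool() grows its pool without bound, while a plain ThreadPoolExecutor over an unbounded queue never grows beyond corePoolSize, because the queue accepts every task before the pool considers adding threads. The tryTransfer-based offer above inverts that: it fails when no worker is idle, which makes the executor grow the pool up to maximumPoolSize first, and the rejection handler queues the task only once the maximum is reached. Below is a minimal usage sketch; the demo class name, pool sizes, and simulated workload are illustrative assumptions, not part of the library.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

import org.conqat.lib.commons.concurrent.MoreExecutors;

// Hypothetical demo class, not part of org.conqat.lib.commons.
public class MoreExecutorsDemo {

	public static void main(String[] args) throws InterruptedException {
		// Keeps 2 idle threads, grows up to 8 while submissions outpace completions,
		// and queues further tasks only once all 8 threads are busy.
		ExecutorService executor = MoreExecutors.newCachedThreadPool(2, 8);
		for (int i = 0; i < 100; i++) {
			executor.submit(() -> {
				try {
					Thread.sleep(100); // simulate a short burst of work
				} catch (InterruptedException e) {
					Thread.currentThread().interrupt();
				}
			});
		}
		executor.shutdown();
		executor.awaitTermination(1, TimeUnit.MINUTES);
	}
}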




