/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.util.internal;
import io.netty.util.CharsetUtil;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import io.netty.util.internal.shaded.org.jctools.queues.MpmcArrayQueue;
import io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueue;
import io.netty.util.internal.shaded.org.jctools.queues.MpscChunkedArrayQueue;
import io.netty.util.internal.shaded.org.jctools.queues.MpscUnboundedArrayQueue;
import io.netty.util.internal.shaded.org.jctools.queues.SpscLinkedQueue;
import io.netty.util.internal.shaded.org.jctools.queues.atomic.MpmcAtomicArrayQueue;
import io.netty.util.internal.shaded.org.jctools.queues.atomic.MpscAtomicArrayQueue;
import io.netty.util.internal.shaded.org.jctools.queues.atomic.MpscChunkedAtomicArrayQueue;
import io.netty.util.internal.shaded.org.jctools.queues.atomic.MpscUnboundedAtomicArrayQueue;
import io.netty.util.internal.shaded.org.jctools.queues.atomic.SpscLinkedAtomicQueue;
import io.netty.util.internal.shaded.org.jctools.util.Pow2;
import io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.file.Files;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Arrays;
import java.util.Collections;
import java.util.Deque;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Queue;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static io.netty.util.internal.PlatformDependent0.HASH_CODE_ASCII_SEED;
import static io.netty.util.internal.PlatformDependent0.HASH_CODE_C1;
import static io.netty.util.internal.PlatformDependent0.HASH_CODE_C2;
import static io.netty.util.internal.PlatformDependent0.hashCodeAsciiSanitize;
import static io.netty.util.internal.PlatformDependent0.unalignedAccess;
import static java.lang.Math.max;
import static java.lang.Math.min;
/**
* Utility that detects various properties specific to the current runtime
* environment, such as Java version and the availability of the
* {@code sun.misc.Unsafe} object.
*
* You can disable the use of {@code sun.misc.Unsafe} if you specify
* the system property {@code io.netty.noUnsafe}.
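*
* <p>A minimal usage sketch (illustrative only; it just combines methods declared in this class):
* <pre>{@code
* // e.g. started with -Dio.netty.noUnsafe=true to force the non-Unsafe code paths
* if (PlatformDependent.hasUnsafe() && PlatformDependent.directBufferPreferred()) {
*     // fast path: direct buffers backed by sun.misc.Unsafe
* } else {
*     // fall back to heap buffers
* }
* }</pre>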
*/
public final class PlatformDependent {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(PlatformDependent.class);
private static Pattern MAX_DIRECT_MEMORY_SIZE_ARG_PATTERN;
private static final boolean MAYBE_SUPER_USER;
private static final boolean CAN_ENABLE_TCP_NODELAY_BY_DEFAULT = !isAndroid();
private static final Throwable UNSAFE_UNAVAILABILITY_CAUSE = unsafeUnavailabilityCause0();
private static final boolean DIRECT_BUFFER_PREFERRED;
private static final long MAX_DIRECT_MEMORY = estimateMaxDirectMemory();
private static final int MPSC_CHUNK_SIZE = 1024;
private static final int MIN_MAX_MPSC_CAPACITY = MPSC_CHUNK_SIZE * 2;
private static final int MAX_ALLOWED_MPSC_CAPACITY = Pow2.MAX_POW2;
private static final long BYTE_ARRAY_BASE_OFFSET = byteArrayBaseOffset0();
private static final File TMPDIR = tmpdir0();
private static final int BIT_MODE = bitMode0();
private static final String NORMALIZED_ARCH = normalizeArch(SystemPropertyUtil.get("os.arch", ""));
private static final String NORMALIZED_OS = normalizeOs(SystemPropertyUtil.get("os.name", ""));
// keep in sync with maven's pom.xml via os.detection.classifierWithLikes!
private static final String[] ALLOWED_LINUX_OS_CLASSIFIERS = {"fedora", "suse", "arch"};
private static final Set<String> LINUX_OS_CLASSIFIERS;
private static final boolean IS_WINDOWS = isWindows0();
private static final boolean IS_OSX = isOsx0();
private static final boolean IS_J9_JVM = isJ9Jvm0();
private static final boolean IS_IVKVM_DOT_NET = isIkvmDotNet0();
private static final int ADDRESS_SIZE = addressSize0();
private static final boolean USE_DIRECT_BUFFER_NO_CLEANER;
private static final AtomicLong DIRECT_MEMORY_COUNTER;
private static final long DIRECT_MEMORY_LIMIT;
private static final ThreadLocalRandomProvider RANDOM_PROVIDER;
private static final Cleaner CLEANER;
private static final int UNINITIALIZED_ARRAY_ALLOCATION_THRESHOLD;
// For specifications, see https://www.freedesktop.org/software/systemd/man/os-release.html
private static final String[] OS_RELEASE_FILES = {"/etc/os-release", "/usr/lib/os-release"};
private static final String LINUX_ID_PREFIX = "ID=";
private static final String LINUX_ID_LIKE_PREFIX = "ID_LIKE=";
public static final boolean BIG_ENDIAN_NATIVE_ORDER = ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN;
private static final Cleaner NOOP = new Cleaner() {
@Override
public void freeDirectBuffer(ByteBuffer buffer) {
// NOOP
}
};
static {
if (javaVersion() >= 7) {
RANDOM_PROVIDER = new ThreadLocalRandomProvider() {
@Override
@SuppressJava6Requirement(reason = "Usage guarded by java version check")
public Random current() {
return java.util.concurrent.ThreadLocalRandom.current();
}
};
} else {
RANDOM_PROVIDER = new ThreadLocalRandomProvider() {
@Override
public Random current() {
return ThreadLocalRandom.current();
}
};
}
// Here is how the system property is used:
//
// * < 0 - Don't use cleaner, and inherit max direct memory from java. In this case the
// "practical max direct memory" would be 2 * max memory as defined by the JDK.
// * == 0 - Use cleaner, Netty will not enforce max memory, and instead will defer to JDK.
// * > 0 - Don't use cleaner. This will limit Netty's total direct memory
// (note that the JDK's direct memory limit is independent of this).
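// For example (illustrative values only): -Dio.netty.maxDirectMemory=0 defers entirely to the
// JDK's -XX:MaxDirectMemorySize limit, while -Dio.netty.maxDirectMemory=134217728 makes Netty
// itself track and cap its own direct allocations at 128 MiB.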
long maxDirectMemory = SystemPropertyUtil.getLong("io.netty.maxDirectMemory", -1);
if (maxDirectMemory == 0 || !hasUnsafe() || !PlatformDependent0.hasDirectBufferNoCleanerConstructor()) {
USE_DIRECT_BUFFER_NO_CLEANER = false;
DIRECT_MEMORY_COUNTER = null;
} else {
USE_DIRECT_BUFFER_NO_CLEANER = true;
if (maxDirectMemory < 0) {
maxDirectMemory = MAX_DIRECT_MEMORY;
if (maxDirectMemory <= 0) {
DIRECT_MEMORY_COUNTER = null;
} else {
DIRECT_MEMORY_COUNTER = new AtomicLong();
}
} else {
DIRECT_MEMORY_COUNTER = new AtomicLong();
}
}
logger.debug("-Dio.netty.maxDirectMemory: {} bytes", maxDirectMemory);
DIRECT_MEMORY_LIMIT = maxDirectMemory >= 1 ? maxDirectMemory : MAX_DIRECT_MEMORY;
int tryAllocateUninitializedArray =
SystemPropertyUtil.getInt("io.netty.uninitializedArrayAllocationThreshold", 1024);
UNINITIALIZED_ARRAY_ALLOCATION_THRESHOLD = javaVersion() >= 9 && PlatformDependent0.hasAllocateArrayMethod() ?
tryAllocateUninitializedArray : -1;
logger.debug("-Dio.netty.uninitializedArrayAllocationThreshold: {}", UNINITIALIZED_ARRAY_ALLOCATION_THRESHOLD);
MAYBE_SUPER_USER = maybeSuperUser0();
if (!isAndroid()) {
// only direct to method if we are not running on android.
// See https://github.com/netty/netty/issues/2604
if (javaVersion() >= 9) {
CLEANER = CleanerJava9.isSupported() ? new CleanerJava9() : NOOP;
} else {
CLEANER = CleanerJava6.isSupported() ? new CleanerJava6() : NOOP;
}
} else {
CLEANER = NOOP;
}
// We should always prefer direct buffers by default if we can use a Cleaner to release direct buffers.
DIRECT_BUFFER_PREFERRED = CLEANER != NOOP
&& !SystemPropertyUtil.getBoolean("io.netty.noPreferDirect", false);
if (logger.isDebugEnabled()) {
logger.debug("-Dio.netty.noPreferDirect: {}", !DIRECT_BUFFER_PREFERRED);
}
/*
* We do not want to log this message if unsafe is explicitly disabled. Do not remove the explicit no unsafe
* guard.
*/
if (CLEANER == NOOP && !PlatformDependent0.isExplicitNoUnsafe()) {
logger.info(
"Your platform does not provide complete low-level API for accessing direct buffers reliably. " +
"Unless explicitly requested, heap buffer will always be preferred to avoid potential system " +
"instability.");
}
final Set<String> allowedClassifiers = Collections.unmodifiableSet(
new HashSet<String>(Arrays.asList(ALLOWED_LINUX_OS_CLASSIFIERS)));
final Set<String> availableClassifiers = new LinkedHashSet<String>();
if (!addPropertyOsClassifiers(allowedClassifiers, availableClassifiers)) {
addFilesystemOsClassifiers(allowedClassifiers, availableClassifiers);
}
LINUX_OS_CLASSIFIERS = Collections.unmodifiableSet(availableClassifiers);
}
static void addFilesystemOsClassifiers(final Set<String> allowedClassifiers,
final Set<String> availableClassifiers) {
for (final String osReleaseFileName : OS_RELEASE_FILES) {
final File file = new File(osReleaseFileName);
boolean found = AccessController.doPrivileged(new PrivilegedAction<Boolean>() {
@Override
public Boolean run() {
try {
if (file.exists()) {
BufferedReader reader = null;
try {
reader = new BufferedReader(
new InputStreamReader(
new FileInputStream(file), CharsetUtil.UTF_8));
String line;
while ((line = reader.readLine()) != null) {
if (line.startsWith(LINUX_ID_PREFIX)) {
String id = normalizeOsReleaseVariableValue(
line.substring(LINUX_ID_PREFIX.length()));
addClassifier(allowedClassifiers, availableClassifiers, id);
} else if (line.startsWith(LINUX_ID_LIKE_PREFIX)) {
line = normalizeOsReleaseVariableValue(
line.substring(LINUX_ID_LIKE_PREFIX.length()));
addClassifier(allowedClassifiers, availableClassifiers, line.split("[ ]+"));
}
}
} catch (SecurityException e) {
logger.debug("Unable to read {}", osReleaseFileName, e);
} catch (IOException e) {
logger.debug("Error while reading content of {}", osReleaseFileName, e);
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException ignored) {
// Ignore
}
}
}
// specification states we should only fall back if /etc/os-release does not exist
return true;
}
} catch (SecurityException e) {
logger.debug("Unable to check if {} exists", osReleaseFileName, e);
}
return false;
}
});
if (found) {
break;
}
}
}
static boolean addPropertyOsClassifiers(Set<String> allowedClassifiers, Set<String> availableClassifiers) {
// empty: -Dio.netty.osClassifiers (no distro specific classifiers for native libs)
// single ID: -Dio.netty.osClassifiers=ubuntu
// pair ID, ID_LIKE: -Dio.netty.osClassifiers=ubuntu,debian
// illegal otherwise
String osClassifiersPropertyName = "io.netty.osClassifiers";
String osClassifiers = SystemPropertyUtil.get(osClassifiersPropertyName);
if (osClassifiers == null) {
return false;
}
if (osClassifiers.isEmpty()) {
// let users omit classifiers with just -Dio.netty.osClassifiers
return true;
}
String[] classifiers = osClassifiers.split(",");
if (classifiers.length == 0) {
throw new IllegalArgumentException(
osClassifiersPropertyName + " property is not empty, but contains no classifiers: "
+ osClassifiers);
}
// at most ID, ID_LIKE classifiers
if (classifiers.length > 2) {
throw new IllegalArgumentException(
osClassifiersPropertyName + " property contains more than 2 classifiers: " + osClassifiers);
}
for (String classifier : classifiers) {
addClassifier(allowedClassifiers, availableClassifiers, classifier);
}
return true;
}
public static long byteArrayBaseOffset() {
return BYTE_ARRAY_BASE_OFFSET;
}
public static boolean hasDirectBufferNoCleanerConstructor() {
return PlatformDependent0.hasDirectBufferNoCleanerConstructor();
}
public static byte[] allocateUninitializedArray(int size) {
return UNINITIALIZED_ARRAY_ALLOCATION_THRESHOLD < 0 || UNINITIALIZED_ARRAY_ALLOCATION_THRESHOLD > size ?
new byte[size] : PlatformDependent0.allocateUninitializedArray(size);
}
/**
* Returns {@code true} if and only if the current platform is Android
*/
public static boolean isAndroid() {
return PlatformDependent0.isAndroid();
}
/**
* Return {@code true} if the JVM is running on Windows
*/
public static boolean isWindows() {
return IS_WINDOWS;
}
/**
* Return {@code true} if the JVM is running on OSX / MacOS
*/
public static boolean isOsx() {
return IS_OSX;
}
/**
* Return {@code true} if the current user may be a super-user. Be aware that this is just a hint,
* so it may return false positives.
*/
public static boolean maybeSuperUser() {
return MAYBE_SUPER_USER;
}
/**
* Return the version of Java under which this library is used.
*/
public static int javaVersion() {
return PlatformDependent0.javaVersion();
}
/**
* Returns {@code true} if and only if it is fine to enable TCP_NODELAY socket option by default.
*/
public static boolean canEnableTcpNoDelayByDefault() {
return CAN_ENABLE_TCP_NODELAY_BY_DEFAULT;
}
/**
* Return {@code true} if {@code sun.misc.Unsafe} was found on the classpath and can be used for accelerated
* direct memory access.
*/
public static boolean hasUnsafe() {
return UNSAFE_UNAVAILABILITY_CAUSE == null;
}
/**
* Return the reason (if any) why {@code sun.misc.Unsafe} was not available.
*/
public static Throwable getUnsafeUnavailabilityCause() {
return UNSAFE_UNAVAILABILITY_CAUSE;
}
/**
* {@code true} if and only if the platform supports unaligned access.
*
* @see <a href="https://en.wikipedia.org/wiki/Segmentation_fault#Bus_error">Wikipedia on segfault</a>
*/
public static boolean isUnaligned() {
return PlatformDependent0.isUnaligned();
}
/**
* Returns {@code true} if the platform has a reliable low-level direct buffer access API and the user has
* not specified the {@code -Dio.netty.noPreferDirect} option.
*/
public static boolean directBufferPreferred() {
return DIRECT_BUFFER_PREFERRED;
}
/**
* Returns the maximum memory reserved for direct buffer allocation.
*/
public static long maxDirectMemory() {
return DIRECT_MEMORY_LIMIT;
}
/**
* Returns the current memory reserved for direct buffer allocation.
* This method returns -1 if the value is not available.
*
* @see #maxDirectMemory()
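*
* <p>A minimal usage sketch (illustrative):
* <pre>{@code
* long used = PlatformDependent.usedDirectMemory();
* long max = PlatformDependent.maxDirectMemory();
* if (used >= 0) {
*     // e.g. report "used / max" to a metrics system
* }
* }</pre>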
*/
public static long usedDirectMemory() {
return DIRECT_MEMORY_COUNTER != null ? DIRECT_MEMORY_COUNTER.get() : -1;
}
/**
* Returns the temporary directory.
*/
public static File tmpdir() {
return TMPDIR;
}
/**
* Returns the bit mode of the current VM (usually 32 or 64).
*/
public static int bitMode() {
return BIT_MODE;
}
/**
* Return the address size of the OS:
* 4 (for 32-bit systems) and 8 (for 64-bit systems).
*/
public static int addressSize() {
return ADDRESS_SIZE;
}
public static long allocateMemory(long size) {
return PlatformDependent0.allocateMemory(size);
}
public static void freeMemory(long address) {
PlatformDependent0.freeMemory(address);
}
public static long reallocateMemory(long address, long newSize) {
return PlatformDependent0.reallocateMemory(address, newSize);
}
/**
* Raises an exception bypassing compiler checks for checked exceptions.
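*
* <p>A minimal usage sketch (illustrative; {@code doIo()} is a hypothetical method that declares
* {@link IOException}):
* <pre>{@code
* try {
*     doIo();
* } catch (IOException e) {
*     // rethrows without the enclosing method having to declare "throws IOException"
*     PlatformDependent.throwException(e);
* }
* }</pre>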
*/
public static void throwException(Throwable t) {
if (hasUnsafe()) {
PlatformDependent0.throwException(t);
} else {
PlatformDependent.throwException0(t);
}
}
@SuppressWarnings("unchecked")
private static <E extends Throwable> void throwException0(Throwable t) throws E {
throw (E) t;
}
/**
* Creates a new fastest {@link ConcurrentMap} implementation for the current platform.
*/
public static <K, V> ConcurrentMap<K, V> newConcurrentHashMap() {
return new ConcurrentHashMap<K, V>();
}
/**
* Creates a new fastest {@link LongCounter} implementation for the current platform.
*/
public static LongCounter newLongCounter() {
if (javaVersion() >= 8) {
return new LongAdderCounter();
} else {
return new AtomicLongCounter();
}
}
/**
* Creates a new fastest {@link ConcurrentMap} implementation for the current platform.
*/
public static <K, V> ConcurrentMap<K, V> newConcurrentHashMap(int initialCapacity) {
return new ConcurrentHashMap<K, V>(initialCapacity);
}
/**
* Creates a new fastest {@link ConcurrentMap} implementation for the current platform.
*/
public static <K, V> ConcurrentMap<K, V> newConcurrentHashMap(int initialCapacity, float loadFactor) {
return new ConcurrentHashMap<K, V>(initialCapacity, loadFactor);
}
/**
* Creates a new fastest {@link ConcurrentMap} implementation for the current platform.
*/
public static <K, V> ConcurrentMap<K, V> newConcurrentHashMap(
int initialCapacity, float loadFactor, int concurrencyLevel) {
return new ConcurrentHashMap<K, V>(initialCapacity, loadFactor, concurrencyLevel);
}
/**
* Creates a new fastest {@link ConcurrentMap} implementation for the current platform.
*/
public static <K, V> ConcurrentMap<K, V> newConcurrentHashMap(Map<? extends K, ? extends V> map) {
return new ConcurrentHashMap<K, V>(map);
}
/**
* Try to deallocate the specified direct {@link ByteBuffer}. Please note this method does nothing if
* the current platform does not support this operation or the specified buffer is not a direct buffer.
*/
public static void freeDirectBuffer(ByteBuffer buffer) {
CLEANER.freeDirectBuffer(buffer);
}
public static long directBufferAddress(ByteBuffer buffer) {
return PlatformDependent0.directBufferAddress(buffer);
}
public static ByteBuffer directBuffer(long memoryAddress, int size) {
if (PlatformDependent0.hasDirectBufferNoCleanerConstructor()) {
return PlatformDependent0.newDirectBuffer(memoryAddress, size);
}
throw new UnsupportedOperationException(
"sun.misc.Unsafe or java.nio.DirectByteBuffer.(long, int) not available");
}
public static Object getObject(Object object, long fieldOffset) {
return PlatformDependent0.getObject(object, fieldOffset);
}
public static int getInt(Object object, long fieldOffset) {
return PlatformDependent0.getInt(object, fieldOffset);
}
static void safeConstructPutInt(Object object, long fieldOffset, int value) {
PlatformDependent0.safeConstructPutInt(object, fieldOffset, value);
}
public static int getIntVolatile(long address) {
return PlatformDependent0.getIntVolatile(address);
}
public static void putIntOrdered(long address, int newValue) {
PlatformDependent0.putIntOrdered(address, newValue);
}
public static byte getByte(long address) {
return PlatformDependent0.getByte(address);
}
public static short getShort(long address) {
return PlatformDependent0.getShort(address);
}
public static int getInt(long address) {
return PlatformDependent0.getInt(address);
}
public static long getLong(long address) {
return PlatformDependent0.getLong(address);
}
public static byte getByte(byte[] data, int index) {
return hasUnsafe() ? PlatformDependent0.getByte(data, index) : data[index];
}
public static byte getByte(byte[] data, long index) {
return hasUnsafe() ? PlatformDependent0.getByte(data, index) : data[toIntExact(index)];
}
public static short getShort(byte[] data, int index) {
return hasUnsafe() ? PlatformDependent0.getShort(data, index) : data[index];
}
public static int getInt(byte[] data, int index) {
return hasUnsafe() ? PlatformDependent0.getInt(data, index) : data[index];
}
public static int getInt(int[] data, long index) {
return hasUnsafe() ? PlatformDependent0.getInt(data, index) : data[toIntExact(index)];
}
public static long getLong(byte[] data, int index) {
return hasUnsafe() ? PlatformDependent0.getLong(data, index) : data[index];
}
public static long getLong(long[] data, long index) {
return hasUnsafe() ? PlatformDependent0.getLong(data, index) : data[toIntExact(index)];
}
private static int toIntExact(long value) {
if (javaVersion() >= 8) {
return toIntExact8(value);
}
int result = (int) value;
if (result != value) {
throw new ArithmeticException("Cannot convert to exact int: " + value);
}
return result;
}
@SuppressJava6Requirement(reason = "version checked")
private static int toIntExact8(long value) {
return Math.toIntExact(value);
}
private static long getLongSafe(byte[] bytes, int offset) {
if (BIG_ENDIAN_NATIVE_ORDER) {
return (long) bytes[offset] << 56 |
((long) bytes[offset + 1] & 0xff) << 48 |
((long) bytes[offset + 2] & 0xff) << 40 |
((long) bytes[offset + 3] & 0xff) << 32 |
((long) bytes[offset + 4] & 0xff) << 24 |
((long) bytes[offset + 5] & 0xff) << 16 |
((long) bytes[offset + 6] & 0xff) << 8 |
(long) bytes[offset + 7] & 0xff;
}
return (long) bytes[offset] & 0xff |
((long) bytes[offset + 1] & 0xff) << 8 |
((long) bytes[offset + 2] & 0xff) << 16 |
((long) bytes[offset + 3] & 0xff) << 24 |
((long) bytes[offset + 4] & 0xff) << 32 |
((long) bytes[offset + 5] & 0xff) << 40 |
((long) bytes[offset + 6] & 0xff) << 48 |
(long) bytes[offset + 7] << 56;
}
private static int getIntSafe(byte[] bytes, int offset) {
if (BIG_ENDIAN_NATIVE_ORDER) {
return bytes[offset] << 24 |
(bytes[offset + 1] & 0xff) << 16 |
(bytes[offset + 2] & 0xff) << 8 |
bytes[offset + 3] & 0xff;
}
return bytes[offset] & 0xff |
(bytes[offset + 1] & 0xff) << 8 |
(bytes[offset + 2] & 0xff) << 16 |
bytes[offset + 3] << 24;
}
private static short getShortSafe(byte[] bytes, int offset) {
if (BIG_ENDIAN_NATIVE_ORDER) {
return (short) (bytes[offset] << 8 | (bytes[offset + 1] & 0xff));
}
return (short) (bytes[offset] & 0xff | (bytes[offset + 1] << 8));
}
/**
* Identical to {@link PlatformDependent0#hashCodeAsciiCompute(long, int)} but for {@link CharSequence}.
*/
private static int hashCodeAsciiCompute(CharSequence value, int offset, int hash) {
if (BIG_ENDIAN_NATIVE_ORDER) {
return hash * HASH_CODE_C1 +
// Low order int
hashCodeAsciiSanitizeInt(value, offset + 4) * HASH_CODE_C2 +
// High order int
hashCodeAsciiSanitizeInt(value, offset);
}
return hash * HASH_CODE_C1 +
// Low order int
hashCodeAsciiSanitizeInt(value, offset) * HASH_CODE_C2 +
// High order int
hashCodeAsciiSanitizeInt(value, offset + 4);
}
/**
* Identical to {@link PlatformDependent0#hashCodeAsciiSanitize(int)} but for {@link CharSequence}.
*/
private static int hashCodeAsciiSanitizeInt(CharSequence value, int offset) {
if (BIG_ENDIAN_NATIVE_ORDER) {
// mimic a unsafe.getInt call on a big endian machine
return (value.charAt(offset + 3) & 0x1f) |
(value.charAt(offset + 2) & 0x1f) << 8 |
(value.charAt(offset + 1) & 0x1f) << 16 |
(value.charAt(offset) & 0x1f) << 24;
}
return (value.charAt(offset + 3) & 0x1f) << 24 |
(value.charAt(offset + 2) & 0x1f) << 16 |
(value.charAt(offset + 1) & 0x1f) << 8 |
(value.charAt(offset) & 0x1f);
}
/**
* Identical to {@link PlatformDependent0#hashCodeAsciiSanitize(short)} but for {@link CharSequence}.
*/
private static int hashCodeAsciiSanitizeShort(CharSequence value, int offset) {
if (BIG_ENDIAN_NATIVE_ORDER) {
// mimic a unsafe.getShort call on a big endian machine
return (value.charAt(offset + 1) & 0x1f) |
(value.charAt(offset) & 0x1f) << 8;
}
return (value.charAt(offset + 1) & 0x1f) << 8 |
(value.charAt(offset) & 0x1f);
}
/**
* Identical to {@link PlatformDependent0#hashCodeAsciiSanitize(byte)} but for {@link CharSequence}.
*/
private static int hashCodeAsciiSanitizeByte(char value) {
return value & 0x1f;
}
public static void putByte(long address, byte value) {
PlatformDependent0.putByte(address, value);
}
public static void putShort(long address, short value) {
PlatformDependent0.putShort(address, value);
}
public static void putInt(long address, int value) {
PlatformDependent0.putInt(address, value);
}
public static void putLong(long address, long value) {
PlatformDependent0.putLong(address, value);
}
public static void putByte(byte[] data, int index, byte value) {
PlatformDependent0.putByte(data, index, value);
}
public static void putByte(Object data, long offset, byte value) {
PlatformDependent0.putByte(data, offset, value);
}
public static void putShort(byte[] data, int index, short value) {
PlatformDependent0.putShort(data, index, value);
}
public static void putInt(byte[] data, int index, int value) {
PlatformDependent0.putInt(data, index, value);
}
public static void putLong(byte[] data, int index, long value) {
PlatformDependent0.putLong(data, index, value);
}
public static void putObject(Object o, long offset, Object x) {
PlatformDependent0.putObject(o, offset, x);
}
public static long objectFieldOffset(Field field) {
return PlatformDependent0.objectFieldOffset(field);
}
public static void copyMemory(long srcAddr, long dstAddr, long length) {
PlatformDependent0.copyMemory(srcAddr, dstAddr, length);
}
public static void copyMemory(byte[] src, int srcIndex, long dstAddr, long length) {
PlatformDependent0.copyMemory(src, BYTE_ARRAY_BASE_OFFSET + srcIndex, null, dstAddr, length);
}
public static void copyMemory(byte[] src, int srcIndex, byte[] dst, int dstIndex, long length) {
PlatformDependent0.copyMemory(src, BYTE_ARRAY_BASE_OFFSET + srcIndex,
dst, BYTE_ARRAY_BASE_OFFSET + dstIndex, length);
}
public static void copyMemory(long srcAddr, byte[] dst, int dstIndex, long length) {
PlatformDependent0.copyMemory(null, srcAddr, dst, BYTE_ARRAY_BASE_OFFSET + dstIndex, length);
}
public static void setMemory(byte[] dst, int dstIndex, long bytes, byte value) {
PlatformDependent0.setMemory(dst, BYTE_ARRAY_BASE_OFFSET + dstIndex, bytes, value);
}
public static void setMemory(long address, long bytes, byte value) {
PlatformDependent0.setMemory(address, bytes, value);
}
/**
* Allocate a new {@link ByteBuffer} with the given {@code capacity}. {@link ByteBuffer}s allocated with
* this method MUST be deallocated via {@link #freeDirectNoCleaner(ByteBuffer)}.
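*
* <p>A minimal usage sketch (illustrative capacity; only valid when
* {@link #useDirectBufferNoCleaner()} returns {@code true}):
* <pre>{@code
* ByteBuffer buf = PlatformDependent.allocateDirectNoCleaner(4096);
* try {
*     // ... write to / read from buf ...
* } finally {
*     PlatformDependent.freeDirectNoCleaner(buf);
* }
* }</pre>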
*/
public static ByteBuffer allocateDirectNoCleaner(int capacity) {
assert USE_DIRECT_BUFFER_NO_CLEANER;
incrementMemoryCounter(capacity);
try {
return PlatformDependent0.allocateDirectNoCleaner(capacity);
} catch (Throwable e) {
decrementMemoryCounter(capacity);
throwException(e);
return null;
}
}
/**
* Reallocate a new {@link ByteBuffer} with the given {@code capacity}. {@link ByteBuffer}s reallocated with
* this method MUST be deallocated via {@link #freeDirectNoCleaner(ByteBuffer)}.
*/
public static ByteBuffer reallocateDirectNoCleaner(ByteBuffer buffer, int capacity) {
assert USE_DIRECT_BUFFER_NO_CLEANER;
int len = capacity - buffer.capacity();
incrementMemoryCounter(len);
try {
return PlatformDependent0.reallocateDirectNoCleaner(buffer, capacity);
} catch (Throwable e) {
decrementMemoryCounter(len);
throwException(e);
return null;
}
}
/**
* This method MUST only be called for {@link ByteBuffer}s that were allocated via
* {@link #allocateDirectNoCleaner(int)}.
*/
public static void freeDirectNoCleaner(ByteBuffer buffer) {
assert USE_DIRECT_BUFFER_NO_CLEANER;
int capacity = buffer.capacity();
PlatformDependent0.freeMemory(PlatformDependent0.directBufferAddress(buffer));
decrementMemoryCounter(capacity);
}
public static boolean hasAlignDirectByteBuffer() {
return hasUnsafe() || PlatformDependent0.hasAlignSliceMethod();
}
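/**
* Returns an {@code alignment}-byte-aligned slice of the given direct {@code buffer}. A minimal
* usage sketch (illustrative sizes; the caller is assumed to have allocated enough slack to
* absorb the alignment shift):
* <pre>{@code
* ByteBuffer raw = ByteBuffer.allocateDirect(8192 + 64);
* ByteBuffer aligned = PlatformDependent.alignDirectBuffer(raw, 64);
* // when Unsafe is available, the start address is now a multiple of 64:
* // PlatformDependent.directBufferAddress(aligned) % 64 == 0
* }</pre>
*/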
public static ByteBuffer alignDirectBuffer(ByteBuffer buffer, int alignment) {
if (!buffer.isDirect()) {
throw new IllegalArgumentException("Cannot get aligned slice of non-direct byte buffer.");
}
if (PlatformDependent0.hasAlignSliceMethod()) {
return PlatformDependent0.alignSlice(buffer, alignment);
}
if (hasUnsafe()) {
long address = directBufferAddress(buffer);
long aligned = align(address, alignment);
buffer.position((int) (aligned - address));
return buffer.slice();
}
// We don't have enough information to be able to align any buffers.
throw new UnsupportedOperationException("Cannot align direct buffer. " +
"Needs either Unsafe or ByteBuffer.alignSlice method available.");
}
public static long align(long value, int alignment) {
return Pow2.align(value, alignment);
}
private static void incrementMemoryCounter(int capacity) {
if (DIRECT_MEMORY_COUNTER != null) {
long newUsedMemory = DIRECT_MEMORY_COUNTER.addAndGet(capacity);
if (newUsedMemory > DIRECT_MEMORY_LIMIT) {
DIRECT_MEMORY_COUNTER.addAndGet(-capacity);
throw new OutOfDirectMemoryError("failed to allocate " + capacity
+ " byte(s) of direct memory (used: " + (newUsedMemory - capacity)
+ ", max: " + DIRECT_MEMORY_LIMIT + ')');
}
}
}
private static void decrementMemoryCounter(int capacity) {
if (DIRECT_MEMORY_COUNTER != null) {
long usedMemory = DIRECT_MEMORY_COUNTER.addAndGet(-capacity);
assert usedMemory >= 0;
}
}
public static boolean useDirectBufferNoCleaner() {
return USE_DIRECT_BUFFER_NO_CLEANER;
}
/**
* Compare two {@code byte} arrays for equality. For performance reasons no bounds checking on the
* parameters is performed.
*
* @param bytes1 the first byte array.
* @param startPos1 the position (inclusive) to start comparing in {@code bytes1}.
* @param bytes2 the second byte array.
* @param startPos2 the position (inclusive) to start comparing in {@code bytes2}.
* @param length the amount of bytes to compare. This is assumed to be validated as not going out of bounds
* by the caller.
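*
* <p>For example (illustrative arrays):
* <pre>{@code
* byte[] a = {1, 2, 3, 4};
* byte[] b = {9, 2, 3, 4};
* // compares the 3 bytes starting at a[1] and b[1]
* boolean sameTail = PlatformDependent.equals(a, 1, b, 1, 3); // true
* }</pre>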
*/
public static boolean equals(byte[] bytes1, int startPos1, byte[] bytes2, int startPos2, int length) {
if (javaVersion() > 8 && (startPos2 | startPos1 | (bytes1.length - length) | bytes2.length - length) == 0) {
return Arrays.equals(bytes1, bytes2);
}
return !hasUnsafe() || !unalignedAccess() ?
equalsSafe(bytes1, startPos1, bytes2, startPos2, length) :
PlatformDependent0.equals(bytes1, startPos1, bytes2, startPos2, length);
}
/**
* Determine if a subsection of an array is zero.
* @param bytes The byte array.
* @param startPos The starting index (inclusive) in {@code bytes}.
* @param length The amount of bytes to check for zero.
* @return {@code true} if all bytes in {@code bytes[startPos:startPos+length)} are zero, {@code false} otherwise.
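* <p>For example (illustrative): {@code isZero(new byte[] {0, 0, 7, 0}, 0, 2)} is {@code true},
* while {@code isZero(new byte[] {0, 0, 7, 0}, 0, 4)} is {@code false}.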
*/
public static boolean isZero(byte[] bytes, int startPos, int length) {
return !hasUnsafe() || !unalignedAccess() ?
isZeroSafe(bytes, startPos, length) :
PlatformDependent0.isZero(bytes, startPos, length);
}
/**
* Compare two {@code byte} arrays for equality without leaking timing information.
* For performance reasons no bounds checking on the parameters is performed.
*
* The {@code int} return type is intentional and is designed to allow cascading of constant time operations:
*
* <pre>{@code
* byte[] s1 = new byte[] {1, 2, 3};
* byte[] s2 = new byte[] {1, 2, 3};
* byte[] s3 = new byte[] {1, 2, 3};
* byte[] s4 = new byte[] {4, 5, 6};
* boolean equals = (equalsConstantTime(s1, 0, s2, 0, s1.length) &
*                   equalsConstantTime(s3, 0, s4, 0, s3.length)) != 0;
* }</pre>
*
* @param bytes1 the first byte array.
* @param startPos1 the position (inclusive) to start comparing in {@code bytes1}.
* @param bytes2 the second byte array.
* @param startPos2 the position (inclusive) to start comparing in {@code bytes2}.
* @param length the amount of bytes to compare. This is assumed to be validated as not going out of bounds
* by the caller.
* @return {@code 0} if not equal. {@code 1} if equal.
*/
public static int equalsConstantTime(byte[] bytes1, int startPos1, byte[] bytes2, int startPos2, int length) {
return !hasUnsafe() || !unalignedAccess() ?
ConstantTimeUtils.equalsConstantTime(bytes1, startPos1, bytes2, startPos2, length) :
PlatformDependent0.equalsConstantTime(bytes1, startPos1, bytes2, startPos2, length);
}
/**
* Calculate a hash code of a byte array assuming ASCII character encoding.
* The resulting hash code will be case insensitive.
* @param bytes The array which contains the data to hash.
* @param startPos What index to start generating a hash code in {@code bytes}
* @param length The amount of bytes that should be accounted for in the computation.
* @return The hash code of {@code bytes} assuming ASCII character encoding.
* The resulting hash code will be case insensitive.
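*
* <p>For example (illustrative input), two ASCII spellings differing only in case hash identically:
* <pre>{@code
* byte[] upper = {'F', 'O', 'O'};
* byte[] lower = {'f', 'o', 'o'};
* assert PlatformDependent.hashCodeAscii(upper, 0, upper.length)
*         == PlatformDependent.hashCodeAscii(lower, 0, lower.length);
* }</pre>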
*/
public static int hashCodeAscii(byte[] bytes, int startPos, int length) {
return !hasUnsafe() || !unalignedAccess() ?
hashCodeAsciiSafe(bytes, startPos, length) :
PlatformDependent0.hashCodeAscii(bytes, startPos, length);
}
/**
* Calculate a hash code of a byte array assuming ASCII character encoding.
* The resulting hash code will be case insensitive.
*
* This method assumes that {@code bytes} is equivalent to a {@code byte[]} but just using {@link CharSequence}
* for storage. The upper most byte of each {@code char} from {@code bytes} is ignored.
* @param bytes The array which contains the data to hash (assumed to be equivalent to a {@code byte[]}).
* @return The hash code of {@code bytes} assuming ASCII character encoding.
* The resulting hash code will be case insensitive.
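*
* <p>For example (illustrative input), this overload is intended to agree with the {@code byte[]}
* overload for ASCII content:
* <pre>{@code
* String s = "netty";
* byte[] ascii = {'n', 'e', 't', 't', 'y'};
* assert PlatformDependent.hashCodeAscii(s) == PlatformDependent.hashCodeAscii(ascii, 0, ascii.length);
* }</pre>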
*/
public static int hashCodeAscii(CharSequence bytes) {
final int length = bytes.length();
final int remainingBytes = length & 7;
int hash = HASH_CODE_ASCII_SEED;
// Benchmarking shows that by just naively looping for inputs 8~31 bytes long we incur a relatively large
// performance penalty (we only achieve about 60% of the performance of a loop which iterates over each char).
// Because of this we take special provisions to unroll the looping for these conditions.
if (length >= 32) {
for (int i = length - 8; i >= remainingBytes; i -= 8) {
hash = hashCodeAsciiCompute(bytes, i, hash);
}
} else if (length >= 8) {
hash = hashCodeAsciiCompute(bytes, length - 8, hash);
if (length >= 16) {
hash = hashCodeAsciiCompute(bytes, length - 16, hash);
if (length >= 24) {
hash = hashCodeAsciiCompute(bytes, length - 24, hash);
}
}
}
if (remainingBytes == 0) {
return hash;
}
int offset = 0;
if (remainingBytes != 2 & remainingBytes != 4 & remainingBytes != 6) { // 1, 3, 5, 7
hash = hash * HASH_CODE_C1 + hashCodeAsciiSanitizeByte(bytes.charAt(0));
offset = 1;
}
if (remainingBytes != 1 & remainingBytes != 4 & remainingBytes != 5) { // 2, 3, 6, 7
hash = hash * (offset == 0 ? HASH_CODE_C1 : HASH_CODE_C2)
+ hashCodeAsciiSanitize(hashCodeAsciiSanitizeShort(bytes, offset));
offset += 2;
}
if (remainingBytes >= 4) { // 4, 5, 6, 7
return hash * ((offset == 0 | offset == 3) ? HASH_CODE_C1 : HASH_CODE_C2)
+ hashCodeAsciiSanitizeInt(bytes, offset);
}
return hash;
}
private static final class Mpsc {
private static final boolean USE_MPSC_CHUNKED_ARRAY_QUEUE;
private Mpsc() {
}
static {
Object unsafe = null;
if (hasUnsafe()) {
// jctools goes through its own process of initializing unsafe; of
// course, this requires permissions which might not be granted to calling code, so we
// must mark this block as privileged too
unsafe = AccessController.doPrivileged(new PrivilegedAction