All Downloads are FREE. Search and download functionalities are using the official Maven repository.

com.landawn.abacus.util.stream.Collectors Maven / Gradle / Ivy

There is a newer version: 1.8.1
Show newest version
/*
 * Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package com.landawn.abacus.util.stream;

import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.ConcurrentModificationException;
import java.util.Deque;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;

import com.landawn.abacus.DataSet;
import com.landawn.abacus.util.Array;
import com.landawn.abacus.util.BiMap;
import com.landawn.abacus.util.BooleanList;
import com.landawn.abacus.util.ByteList;
import com.landawn.abacus.util.ByteSummaryStatistics;
import com.landawn.abacus.util.CharList;
import com.landawn.abacus.util.CharSummaryStatistics;
import com.landawn.abacus.util.DoubleList;
import com.landawn.abacus.util.DoubleSummaryStatistics;
import com.landawn.abacus.util.FloatList;
import com.landawn.abacus.util.FloatSummaryStatistics;
import com.landawn.abacus.util.Fn;
import com.landawn.abacus.util.Fn.BiConsumers;
import com.landawn.abacus.util.Fn.BinaryOperators;
import com.landawn.abacus.util.Fn.Suppliers;
import com.landawn.abacus.util.ImmutableList;
import com.landawn.abacus.util.ImmutableMap;
import com.landawn.abacus.util.ImmutableSet;
import com.landawn.abacus.util.IntList;
import com.landawn.abacus.util.IntSummaryStatistics;
import com.landawn.abacus.util.Joiner;
import com.landawn.abacus.util.ListMultimap;
import com.landawn.abacus.util.LongList;
import com.landawn.abacus.util.LongMultiset;
import com.landawn.abacus.util.LongSummaryStatistics;
import com.landawn.abacus.util.Multimap;
import com.landawn.abacus.util.Multiset;
import com.landawn.abacus.util.MutableBoolean;
import com.landawn.abacus.util.N;
import com.landawn.abacus.util.Nullable;
import com.landawn.abacus.util.OptionalDouble;
import com.landawn.abacus.util.OptionalInt;
import com.landawn.abacus.util.OptionalLong;
import com.landawn.abacus.util.Pair;
import com.landawn.abacus.util.ShortList;
import com.landawn.abacus.util.ShortSummaryStatistics;
import com.landawn.abacus.util.function.BiConsumer;
import com.landawn.abacus.util.function.BiFunction;
import com.landawn.abacus.util.function.BiPredicate;
import com.landawn.abacus.util.function.BinaryOperator;
import com.landawn.abacus.util.function.Consumer;
import com.landawn.abacus.util.function.Function;
import com.landawn.abacus.util.function.Predicate;
import com.landawn.abacus.util.function.Supplier;
import com.landawn.abacus.util.function.ToByteFunction;
import com.landawn.abacus.util.function.ToCharFunction;
import com.landawn.abacus.util.function.ToDoubleFunction;
import com.landawn.abacus.util.function.ToFloatFunction;
import com.landawn.abacus.util.function.ToIntFunction;
import com.landawn.abacus.util.function.ToLongFunction;
import com.landawn.abacus.util.function.ToShortFunction;

/**
 * Note: It's copied from OpenJDK at: http://hg.openjdk.java.net/jdk8u/hs-dev/jdk
 * 
 * <p>Implementations of {@link Collector} that implement various useful reduction
 * operations, such as accumulating elements into collections, summarizing
 * elements according to various criteria, etc.
 *
 * <p>The following are examples of using the predefined collectors to perform
 * common mutable reduction tasks:
 *
 * <pre>{@code
 *     // Accumulate names into a List
 *     List list = people.stream().map(Person::getName).collect(Collectors.toList());
 *
 *     // Accumulate names into a TreeSet
 *     Set set = people.stream().map(Person::getName).collect(Collectors.toCollection(TreeSet::new));
 *
 *     // Convert elements to strings and concatenate them, separated by commas
 *     String joined = things.stream()
 *                           .map(Object::toString)
 *                           .collect(Collectors.joining(", "));
 *
 *     // Compute sum of salaries of employee
 *     int total = employees.stream()
 *                          .collect(Collectors.summingInt(Employee::getSalary)));
 *
 *     // Group employees by department
 *     Map> byDept
 *         = employees.stream()
 *                    .collect(Collectors.groupingBy(Employee::getDepartment));
 *
 *     // Compute sum of salaries by department
 *     Map totalByDept
 *         = employees.stream()
 *                    .collect(Collectors.groupingBy(Employee::getDepartment,
 *                                                   Collectors.summingInt(Employee::getSalary)));
 *
 *     // Partition students into passing and failing
 *     Map> passingFailing =
 *         students.stream()
 *                 .collect(Collectors.partitioningBy(s -> s.getGrade() >= PASS_THRESHOLD));
 *
 * }</pre>
* * @since 1.8 */ public class Collectors { static final Object NONE = new Object(); static final Set CH_CONCURRENT_ID = Collections .unmodifiableSet(EnumSet.of(Collector.Characteristics.CONCURRENT, Collector.Characteristics.UNORDERED, Collector.Characteristics.IDENTITY_FINISH)); static final Set CH_CONCURRENT_NOID = Collections .unmodifiableSet(EnumSet.of(Collector.Characteristics.CONCURRENT, Collector.Characteristics.UNORDERED)); static final Set CH_ID = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.IDENTITY_FINISH)); static final Set CH_UNORDERED_ID = Collections .unmodifiableSet(EnumSet.of(Collector.Characteristics.UNORDERED, Collector.Characteristics.IDENTITY_FINISH)); static final Set CH_UNORDERED = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.UNORDERED)); static final Set CH_NOID = Collections.emptySet(); // ============================================================== static final Function, ImmutableList> ImmutableList_Finisher = new Function, ImmutableList>() { @Override public ImmutableList apply(List t) { return ImmutableList.of(t); } }; static final Function, ImmutableSet> ImmutableSet_Finisher = new Function, ImmutableSet>() { @Override public ImmutableSet apply(Set t) { return ImmutableSet.of(t); } }; static final Function, ImmutableMap> ImmutableMap_Finisher = new Function, ImmutableMap>() { @Override public ImmutableMap apply(Map t) { return ImmutableMap.of(t); } }; static final BiConsumer, Object> Multiset_Accumulator = new BiConsumer, Object>() { @Override public void accept(Multiset c, Object t) { c.add(t); } }; static final BinaryOperator> Multiset_Combiner = new BinaryOperator>() { @Override public Multiset apply(Multiset a, Multiset b) { a.addAll(b); return a; } }; static final BiConsumer, Object> LongMultiset_Accumulator = new BiConsumer, Object>() { @Override public void accept(LongMultiset c, Object t) { c.add(t); } }; static final BinaryOperator> LongMultiset_Combiner = new BinaryOperator>() {
// NOTE(review): this chunk is a mangled extraction of the original source file. Generic type
// parameters have been stripped everywhere (e.g. `static final Set CH_ID` was presumably
// `Set<Characteristics>`, and `Function, ImmutableList>` presumably
// `Function<List<T>, ImmutableList<T>>`), and many original source lines were joined, so the
// `// ====` separator comment above now comments out the code that follows it on the same
// physical line. Recover the original file from the published artifact before changing code;
// the comments added below only map out the content, byte-identical, for the reader.
// Above: characteristics sets shared by all collectors (CH_*), finishers wrapping List/Set/Map
// into their Immutable* counterparts, and Multiset/LongMultiset accumulator ("add element") and
// combiner ("addAll left-into-right, return left") singletons.
@Override public LongMultiset apply(LongMultiset a, LongMultiset b) { a.addAll(b); return a; } }; static final BiConsumer BooleanList_Accumulator = new BiConsumer() { @Override public void accept(BooleanList c, Boolean t) { c.add(t.booleanValue()); } }; static final BinaryOperator BooleanList_Combiner = new BinaryOperator() { @Override public BooleanList apply(BooleanList a, BooleanList b) { a.addAll(b); return a; } }; static final Function BooleanArray_Finisher = new Function() { @Override public boolean[] apply(BooleanList t) { return t.trimToSize().array(); } }; static final BiConsumer CharList_Accumulator = new BiConsumer() { @Override public void accept(CharList c, Character t) { c.add(t.charValue()); } }; static final BinaryOperator CharList_Combiner = new BinaryOperator() { @Override public CharList apply(CharList a, CharList b) { a.addAll(b); return a; } }; static final Function CharArray_Finisher = new Function() { @Override public char[] apply(CharList t) { return t.trimToSize().array(); } }; static final BiConsumer ByteList_Accumulator = new BiConsumer() { @Override public void accept(ByteList c, Byte t) { c.add(t.byteValue()); } }; static final BinaryOperator ByteList_Combiner = new BinaryOperator() { @Override public ByteList apply(ByteList a, ByteList b) { a.addAll(b); return a; } }; static final Function ByteArray_Finisher = new Function() { @Override public byte[] apply(ByteList t) { return t.trimToSize().array(); } }; static final BiConsumer ShortList_Accumulator = new BiConsumer() { @Override public void accept(ShortList c, Short t) { c.add(t.shortValue()); } }; static final BinaryOperator ShortList_Combiner = new BinaryOperator() { @Override public ShortList apply(ShortList a, ShortList b) { a.addAll(b); return a; } }; static final Function ShortArray_Finisher = new Function() { @Override public short[] apply(ShortList t) { return t.trimToSize().array(); } }; static final BiConsumer IntList_Accumulator = new BiConsumer() { @Override public void
// Above: shared accumulator (unbox + add), combiner (addAll) and array-finisher
// (trimToSize().array()) singletons for BooleanList/CharList/ByteList/ShortList;
// the IntList triad begins at the end of the line and continues below.
accept(IntList c, Integer t) { c.add(t.intValue()); } }; static final BinaryOperator IntList_Combiner = new BinaryOperator() { @Override public IntList apply(IntList a, IntList b) { a.addAll(b); return a; } }; static final Function IntArray_Finisher = new Function() { @Override public int[] apply(IntList t) { return t.trimToSize().array(); } }; static final BiConsumer LongList_Accumulator = new BiConsumer() { @Override public void accept(LongList c, Long t) { c.add(t.longValue()); } }; static final BinaryOperator LongList_Combiner = new BinaryOperator() { @Override public LongList apply(LongList a, LongList b) { a.addAll(b); return a; } }; static final Function LongArray_Finisher = new Function() { @Override public long[] apply(LongList t) { return t.trimToSize().array(); } }; static final BiConsumer FloatList_Accumulator = new BiConsumer() { @Override public void accept(FloatList c, Float t) { c.add(t.floatValue()); } }; static final BinaryOperator FloatList_Combiner = new BinaryOperator() { @Override public FloatList apply(FloatList a, FloatList b) { a.addAll(b); return a; } }; static final Function FloatArray_Finisher = new Function() { @Override public float[] apply(FloatList t) { return t.trimToSize().array(); } }; static final BiConsumer DoubleList_Accumulator = new BiConsumer() { @Override public void accept(DoubleList c, Double t) { c.add(t.doubleValue()); } }; static final BinaryOperator DoubleList_Combiner = new BinaryOperator() { @Override public DoubleList apply(DoubleList a, DoubleList b) { a.addAll(b); return a; } }; static final Function DoubleArray_Finisher = new Function() { @Override public double[] apply(DoubleList t) { return t.trimToSize().array(); } }; static final BiConsumer StringBuilder_Accumulator = new BiConsumer() { @Override public void accept(StringBuilder a, CharSequence t) { a.append(t); } }; static final BinaryOperator StringBuilder_Combiner = new BinaryOperator() { @Override public StringBuilder apply(StringBuilder a, StringBuilder
// Above: the same accumulator/combiner/finisher triads for IntList, LongList, FloatList and
// DoubleList, then StringBuilder append/combine helpers used by the string-concatenation
// collectors; StringBuilder_Combiner continues on the next line.
b) { a.append(b); return a; } }; static final Function StringBuilder_Finisher = new Function() { @Override public String apply(StringBuilder a) { return a.toString(); } }; static final BiConsumer Joiner_Accumulator = new BiConsumer() { @Override public void accept(Joiner a, CharSequence t) { a.append(t); } }; static final BinaryOperator Joiner_Combiner = new BinaryOperator() { @Override public Joiner apply(Joiner a, Joiner b) { a.merge(b); return a; } }; static final Function Joiner_Finisher = new Function() { @Override public String apply(Joiner a) { return a.toString(); } }; static final Function Counting_Accumulator = new Function() { @Override public Long apply(Object t) { return 1L; } }; static final BinaryOperator Counting_Combiner = new BinaryOperator() { @Override public Long apply(Long a, Long b) { return a.longValue() + b.longValue(); } }; static final Function CountingInt_Accumulator = new Function() { @Override public Integer apply(Object t) { return 1; } }; static final BinaryOperator CountingInt_Combiner = new BinaryOperator() { @Override public Integer apply(Integer a, Integer b) { return a.intValue() + b.intValue(); } }; static final Supplier SummingInt_Supplier = new Supplier() { @Override public int[] get() { return new int[1]; } }; static final BinaryOperator SummingInt_Combiner = new BinaryOperator() { @Override public int[] apply(int[] a, int[] b) { a[0] += b[0]; return a; } }; static final Function SummingInt_Finisher = new Function() { @Override public Integer apply(int[] a) { return a[0]; } }; static final Supplier SummingInt_Supplier_2 = new Supplier() { @Override public int[] get() { return new int[2]; } }; static final BinaryOperator SummingInt_Combiner_2 = new BinaryOperator() { @Override public int[] apply(int[] a, int[] b) { a[0] += b[0]; a[1] += b[1]; return a; } }; static final Function SummingInt_Finisher_2 = new Function() { @Override public OptionalInt apply(int[] a) { return a[1] == 0 ?
// Above: StringBuilder/Joiner finishers (toString), counting helpers (map each element to 1/1L,
// combine by addition), and summing-int helpers backed by int[] accumulator boxes. The `_2`
// variants carry an extra count slot (a[1]) so the finisher can return OptionalInt.empty()
// when no elements were accumulated; the conditional continues on the next line.
OptionalInt.empty() : OptionalInt.of(a[0]); } }; static final Supplier SummingLong_Supplier = new Supplier() { @Override public long[] get() { return new long[1]; } }; static final BinaryOperator SummingLong_Combiner = new BinaryOperator() { @Override public long[] apply(long[] a, long[] b) { a[0] += b[0]; return a; } }; static final Function SummingLong_Finisher = new Function() { @Override public Long apply(long[] a) { return a[0]; } }; static final Supplier SummingLong_Supplier_2 = new Supplier() { @Override public long[] get() { return new long[2]; } }; static final BinaryOperator SummingLong_Combiner_2 = new BinaryOperator() { @Override public long[] apply(long[] a, long[] b) { a[0] += b[0]; a[1] += b[1]; return a; } }; static final Function SummingLong_Finisher_2 = new Function() { @Override public OptionalLong apply(long[] a) { return a[1] == 0 ? OptionalLong.empty() : OptionalLong.of(a[0]); } }; static final Supplier SummingDouble_Supplier = new Supplier() { @Override public double[] get() { return new double[3]; } }; static final BinaryOperator SummingDouble_Combiner = new BinaryOperator() { @Override public double[] apply(double[] a, double[] b) { sumWithCompensation(a, b[0]); a[2] += b[2]; return sumWithCompensation(a, b[1]); } }; static final Function SummingDouble_Finisher = new Function() { @Override public Double apply(double[] a) { return computeFinalSum(a); } }; static final Supplier SummingDouble_Supplier_2 = new Supplier() { @Override public double[] get() { return new double[4]; } }; static final BinaryOperator SummingDouble_Combiner_2 = new BinaryOperator() { @Override public double[] apply(double[] a, double[] b) { sumWithCompensation(a, b[0]); a[2] += b[2]; a[3] += b[3]; return sumWithCompensation(a, b[1]); } }; static final Function SummingDouble_Finisher_2 = new Function() { @Override public OptionalDouble apply(double[] a) { return a[3] == 0 ?
// Above: summing-long helpers (long[] boxes, same shape as the int variants) and summing-double
// helpers using 3- or 4-slot double[] state combined via sumWithCompensation(...) and finished
// via computeFinalSum(...) — presumably compensated (Kahan-style) summation as in the OpenJDK
// Collectors this file derives from, but those helpers are not visible in this chunk; confirm
// against the full source.
OptionalDouble.empty() : OptionalDouble.of(computeFinalSum(a)); } }; static final Supplier AveragingInt_Supplier = new Supplier() { @Override public long[] get() { return new long[2]; } }; static final BinaryOperator AveragingInt_Combiner = new BinaryOperator() { @Override public long[] apply(long[] a, long[] b) { a[0] += b[0]; a[1] += b[1]; return a; } }; static final Function AveragingInt_Finisher = new Function() { @Override public Double apply(long[] a) { return a[1] == 0 ? 0d : (double) a[0] / a[1]; } }; static final Function AveragingInt_Finisher_2 = new Function() { @Override public OptionalDouble apply(long[] a) { if (a[1] == 0) { return OptionalDouble.empty(); } else { return OptionalDouble.of((double) a[0] / a[1]); } } }; static final Supplier AveragingLong_Supplier = new Supplier() { @Override public long[] get() { return new long[2]; } }; static final BinaryOperator AveragingLong_Combiner = new BinaryOperator() { @Override public long[] apply(long[] a, long[] b) { a[0] += b[0]; a[1] += b[1]; return a; } }; static final Function AveragingLong_Finisher = new Function() { @Override public Double apply(long[] a) { return a[1] == 0 ? 0d : (double) a[0] / a[1]; } }; static final Function AveragingLong_Finisher_2 = new Function() { @Override public OptionalDouble apply(long[] a) { if (a[1] == 0) { return OptionalDouble.empty(); } else { return OptionalDouble.of((double) a[0] / a[1]); } } }; static final Supplier AveragingDouble_Supplier = new Supplier() { @Override public double[] get() { return new double[4]; } }; static final BinaryOperator AveragingDouble_Combiner = new BinaryOperator() { @Override public double[] apply(double[] a, double[] b) { sumWithCompensation(a, b[0]); sumWithCompensation(a, b[1]); a[2] += b[2]; a[3] += b[3]; return a; } }; static final Function AveragingDouble_Finisher = new Function() { @Override public Double apply(double[] a) { return a[2] == 0 ?
// Above: averaging helpers for int/long (long[2] = {sum, count}; plain finisher returns 0d for
// an empty input, the `_2` finisher returns OptionalDouble.empty()) and the averaging-double
// supplier/combiner using the same compensated-sum state as the summing-double helpers.
0d : computeFinalSum(a) / a[2]; } }; static final Function AveragingDouble_Finisher_2 = new Function() { @Override public OptionalDouble apply(double[] a) { if (a[2] == 0) { return OptionalDouble.empty(); } else { return OptionalDouble.of(computeFinalSum(a) / a[2]); } } }; static final Supplier SummarizingChar_Supplier = new Supplier() { @Override public CharSummaryStatistics get() { return new CharSummaryStatistics(); } }; static final BinaryOperator SummarizingChar_Combiner = new BinaryOperator() { @Override public CharSummaryStatistics apply(CharSummaryStatistics a, CharSummaryStatistics b) { a.combine(b); return a; } }; static final Supplier SummarizingByte_Supplier = new Supplier() { @Override public ByteSummaryStatistics get() { return new ByteSummaryStatistics(); } }; static final BinaryOperator SummarizingByte_Combiner = new BinaryOperator() { @Override public ByteSummaryStatistics apply(ByteSummaryStatistics a, ByteSummaryStatistics b) { a.combine(b); return a; } }; static final Supplier SummarizingShort_Supplier = new Supplier() { @Override public ShortSummaryStatistics get() { return new ShortSummaryStatistics(); } }; static final BinaryOperator SummarizingShort_Combiner = new BinaryOperator() { @Override public ShortSummaryStatistics apply(ShortSummaryStatistics a, ShortSummaryStatistics b) { a.combine(b); return a; } }; static final Supplier SummarizingInt_Supplier = new Supplier() { @Override public IntSummaryStatistics get() { return new IntSummaryStatistics(); } }; static final BinaryOperator SummarizingInt_Combiner = new BinaryOperator() { @Override public IntSummaryStatistics apply(IntSummaryStatistics a, IntSummaryStatistics b) { a.combine(b); return a; } }; static final Supplier SummarizingLong_Supplier = new Supplier() { @Override public LongSummaryStatistics get() { return new LongSummaryStatistics(); } }; static final BinaryOperator SummarizingLong_Combiner = new BinaryOperator() { @Override public LongSummaryStatistics
// Above: averaging-double finishers, then Supplier/BinaryOperator singletons for the
// Char/Byte/Short/Int/Long summary-statistics collectors (all combine via a.combine(b)).
apply(LongSummaryStatistics a, LongSummaryStatistics b) { a.combine(b); return a; } }; static final Supplier SummarizingFloat_Supplier = new Supplier() { @Override public FloatSummaryStatistics get() { return new FloatSummaryStatistics(); } }; static final BinaryOperator SummarizingFloat_Combiner = new BinaryOperator() { @Override public FloatSummaryStatistics apply(FloatSummaryStatistics a, FloatSummaryStatistics b) { a.combine(b); return a; } }; static final Supplier SummarizingDouble_Supplier = new Supplier() { @Override public DoubleSummaryStatistics get() { return new DoubleSummaryStatistics(); } }; static final BinaryOperator SummarizingDouble_Combiner = new BinaryOperator() { @Override public DoubleSummaryStatistics apply(DoubleSummaryStatistics a, DoubleSummaryStatistics b) { a.combine(b); return a; } }; static final Function Reducing_Finisher_0 = new Function() { @Override public Object apply(Object[] a) { return a[0]; } }; static final BiConsumer, Object> Reducing_Accumulator = new BiConsumer, Object>() { @Override public void accept(OptionalBox a, Object t) { a.accept(t); } }; static final BinaryOperator> Reducing_Combiner = new BinaryOperator>() { @Override public OptionalBox apply(OptionalBox a, OptionalBox b) { if (b.present) { a.accept(b.value); } return a; } }; static final Function, Nullable> Reducing_Finisher = new Function, Nullable>() { @Override public Nullable apply(OptionalBox a) { return a.present ?
// Above: Float/Double summary-statistics helpers, then the reducing helpers built on the
// OptionalBox holder type (declared elsewhere in this file): accumulate into the box, merge
// the right box's value when present, and finish to a Nullable that is empty when no element
// was seen; the conditional continues on the next line.
Nullable.of(a.value) : (Nullable) Nullable.empty(); } }; static final BiConsumer, Object> Reducing_Accumulator_2 = new BiConsumer, Object>() { @Override public void accept(OptionalBox2 a, Object t) { a.accept(t); } }; static final BinaryOperator> Reducing_Combiner_2 = new BinaryOperator>() { @Override public OptionalBox2 apply(OptionalBox2 a, OptionalBox2 b) { if (b.present) { if (a.present) { a.value = a.op.apply(a.value, b.value); } else { a.value = b.value; a.present = true; } } return a; } }; static final Function, Nullable> Reducing_Finisher_2 = new Function, Nullable>() { @Override public Nullable apply(OptionalBox2 a) { return a.present ? Nullable.of(a.value) : (Nullable) Nullable.empty(); } }; // ============================================================== Collectors() { } /** * Simple implementation class for {@code Collector}. * * @param the type of elements to be collected * @param the type of the result */ static class CollectorImpl implements Collector { private static final Function IDENTITY_FINISHER = new Function() { @Override public Object apply(Object t) { return t; } }; private final Supplier supplier; private final BiConsumer accumulator; private final BinaryOperator combiner; private final Function finisher; private final Set characteristics; CollectorImpl(Supplier supplier, BiConsumer accumulator, BinaryOperator combiner, Set characteristics) { this(supplier, accumulator, combiner, (Function) IDENTITY_FINISHER, characteristics); } CollectorImpl(Supplier supplier, BiConsumer accumulator, BinaryOperator combiner, Function finisher, Set characteristics) { this.supplier = supplier; this.accumulator = accumulator; this.combiner = combiner; this.finisher = finisher; this.characteristics = characteristics; } @Override public BiConsumer accumulator() { return accumulator; } @Override public Supplier supplier() { return supplier; } @Override public BinaryOperator combiner() { return combiner; } @Override public Function finisher() { return finisher;
// Above: OptionalBox2-based reducing helpers (binary-op reduction without an identity value),
// the package-private Collectors() constructor, and the CollectorImpl carrier class: an
// immutable bundle of supplier/accumulator/combiner/finisher/characteristics implementing
// Collector, with a shared identity finisher used by the 4-arg constructor. NOTE(review): the
// joined `// ====` separator on that physical line comments out everything after it in this
// rendered copy — in the real source these were separate lines.
} @Override public Set characteristics() { return characteristics; } } /** * Returns a {@code Collector} that accumulates the input elements into a * new {@code Collection}, in encounter order. The {@code Collection} is * created by the provided factory. * * @param the type of the input elements * @param the type of the resulting {@code Collection} * @param collectionFactory a {@code Supplier} which returns a new, empty * {@code Collection} of the appropriate type * @return a {@code Collector} which collects all the input elements into a * {@code Collection}, in encounter order */ public static > Collector toCollection(Supplier collectionFactory) { final BiConsumer accumulator = BiConsumers.ofAdd(); final BinaryOperator combiner = BinaryOperators. ofAddAll(); return new CollectorImpl<>(collectionFactory, accumulator, combiner, collectionFactory.get() instanceof Set ? CH_UNORDERED_ID : CH_ID); } /** * Returns a {@code Collector} that accumulates the input elements into a * new {@code List}. There are no guarantees on the type, mutability, * serializability, or thread-safety of the {@code List} returned; if more * control over the returned {@code List} is required, use {@link #toCollection(Supplier)}. * * @param the type of the input elements * @return a {@code Collector} which collects all the input elements into a * {@code List}, in encounter order */ public static Collector> toList() { final Supplier> supplier = Suppliers. ofList(); return toCollection(supplier); } public static Collector> toLinkedList() { final Supplier> supplier = Suppliers. ofLinkedList(); return toCollection(supplier); } public static Collector> toImmutableList() { final Collector> downstream = toList(); @SuppressWarnings("rawtypes") final Function, ImmutableList> finisher = (Function) ImmutableList_Finisher; return collectingAndThen(downstream, finisher); } /** * Returns a {@code Collector} that accumulates the input elements into a * new {@code Set}.
There are no guarantees on the type, mutability, * serializability, or thread-safety of the {@code Set} returned; if more * control over the returned {@code Set} is required, use * {@link #toCollection(Supplier)}. * *

This is an {@link Collector.Characteristics#UNORDERED unordered} * Collector. * * @param the type of the input elements * @return a {@code Collector} which collects all the input elements into a * {@code Set} */ public static Collector> toSet() { final Supplier> supplier = Suppliers. ofSet(); return toCollection(supplier); } public static Collector> toLinkedHashSet() { final Supplier> supplier = Suppliers. ofLinkedHashSet(); return toCollection(supplier); } public static Collector> toImmutableSet() { final Collector> downstream = toSet(); @SuppressWarnings("rawtypes") final Function, ImmutableSet> finisher = (Function) ImmutableSet_Finisher; return collectingAndThen(downstream, finisher); } public static Collector> toQueue() { final Supplier> supplier = Suppliers. ofQueue(); return toCollection(supplier); } public static Collector> toDeque() { final Supplier> supplier = Suppliers. ofDeque(); return toCollection(supplier); } public static > Collector toCollection(final Supplier collectionFactory, final int atMostSize) { final BiConsumer accumulator = new BiConsumer() { @Override public void accept(C c, T t) { if (c.size() < atMostSize) { c.add(t); } } }; final BinaryOperator combiner = new BinaryOperator() { @Override public C apply(C a, C b) { if (a.size() < atMostSize) { final int n = atMostSize - a.size(); if (b.size() <= n) { a.addAll(b); } else { if (b instanceof List) { a.addAll(((List) b).subList(0, n)); } else { final Iterator iter = b.iterator(); for (int i = 0; i < n; i++) { a.add(iter.next()); } } } } return a; } }; return new CollectorImpl<>(collectionFactory, accumulator, combiner, collectionFactory.get() instanceof Set ?
// Above: toSet/toLinkedHashSet/toImmutableSet/toQueue/toDeque factories delegating to
// toCollection(Supplier), plus the size-capped toCollection(factory, atMostSize) whose
// accumulator silently drops elements once atMostSize is reached and whose combiner copies at
// most the remaining capacity from the right-hand collection (subList prefix for Lists,
// iterator prefix otherwise).
CH_UNORDERED_ID : CH_ID); } public static Collector> toList(final int atMostSize) { final Supplier> supplier = new Supplier>() { @Override public List get() { return new ArrayList(N.min(256, atMostSize)); } }; return toCollection(supplier, atMostSize); } public static Collector> toSet(final int atMostSize) { final Supplier> supplier = new Supplier>() { @Override public Set get() { return new HashSet(N.initHashCapacity(N.min(256, atMostSize))); } }; return toCollection(supplier, atMostSize); } public static Collector> toMultiset() { final Supplier> supplier = Suppliers.ofMultiset(); return toMultiset(supplier); } @SuppressWarnings("rawtypes") public static Collector> toMultiset(Supplier> supplier) { final BiConsumer, T> accumulator = (BiConsumer) Multiset_Accumulator; final BinaryOperator> combiner = (BinaryOperator) Multiset_Combiner; return new CollectorImpl<>(supplier, accumulator, combiner, CH_UNORDERED_ID); } public static Collector> toLongMultiset() { final Supplier> supplier = Suppliers.ofLongMultiset(); return toLongMultiset(supplier); } @SuppressWarnings("rawtypes") public static Collector> toLongMultiset(Supplier> supplier) { final BiConsumer, T> accumulator = (BiConsumer) LongMultiset_Accumulator; final BinaryOperator> combiner = (BinaryOperator) LongMultiset_Combiner; return new CollectorImpl<>(supplier, accumulator, combiner, CH_UNORDERED_ID); } public static Collector toArray() { return toArray(Fn.Suppliers.ofEmptyObjectArray()); } public static Collector toArray(final Supplier arraySupplier) { final Supplier> supplier = Suppliers.
// Above: size-capped toList/toSet (with presized backing suppliers), toMultiset/toLongMultiset
// (unordered, identity-finish, reusing the shared accumulator/combiner singletons), and the
// start of toArray, which collects into a List and copies into the supplied array type.
ofList(); @SuppressWarnings("rawtypes") final BiConsumer, T> accumulator = (BiConsumer) BiConsumers.ofAdd(); final BinaryOperator> combiner = BinaryOperators.> ofAddAll(); final Function, A[]> finisher = new Function, A[]>() { @Override public A[] apply(List t) { final A[] a = arraySupplier.get(); if (a.length >= t.size()) { return t.toArray(a); } else { return t.toArray((A[]) Array.newInstance(a.getClass().getComponentType(), t.size())); } } }; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } public static Collector toBooleanList() { final Supplier supplier = Suppliers.ofBooleanList(); final BiConsumer accumulator = BooleanList_Accumulator; final BinaryOperator combiner = BooleanList_Combiner; return new CollectorImpl<>(supplier, accumulator, combiner, CH_ID); } public static Collector toBooleanArray() { final Supplier supplier = Suppliers.ofBooleanList(); final BiConsumer accumulator = BooleanList_Accumulator; final BinaryOperator combiner = BooleanList_Combiner; final Function finisher = BooleanArray_Finisher; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } public static Collector toCharList() { final Supplier supplier = Suppliers.ofCharList(); final BiConsumer accumulator = CharList_Accumulator; final BinaryOperator combiner = CharList_Combiner; return new CollectorImpl<>(supplier, accumulator, combiner, CH_ID); } public static Collector toCharArray() { final Supplier supplier = Suppliers.ofCharList(); final BiConsumer accumulator = CharList_Accumulator; final BinaryOperator combiner = CharList_Combiner; final Function finisher = CharArray_Finisher; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } public static Collector toByteList() { final Supplier supplier = Suppliers.ofByteList(); final BiConsumer accumulator = ByteList_Accumulator; final BinaryOperator combiner = ByteList_Combiner; return new CollectorImpl<>(supplier, accumulator, combiner, CH_ID); } public
// Above: the toArray finisher (grows the supplied array via Array.newInstance when too small)
// and primitive-list collectors toBooleanList/toBooleanArray/toCharList/toCharArray/toByteList,
// each wiring the shared accumulator/combiner/finisher singletons declared earlier in the class.
static Collector toByteArray() { final Supplier supplier = Suppliers.ofByteList(); final BiConsumer accumulator = ByteList_Accumulator; final BinaryOperator combiner = ByteList_Combiner; final Function finisher = ByteArray_Finisher; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } public static Collector toShortList() { final Supplier supplier = Suppliers.ofShortList(); final BiConsumer accumulator = ShortList_Accumulator; final BinaryOperator combiner = ShortList_Combiner; return new CollectorImpl<>(supplier, accumulator, combiner, CH_ID); } public static Collector toShortArray() { final Supplier supplier = Suppliers.ofShortList(); final BiConsumer accumulator = ShortList_Accumulator; final BinaryOperator combiner = ShortList_Combiner; final Function finisher = ShortArray_Finisher; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } public static Collector toIntList() { final Supplier supplier = Suppliers.ofIntList(); final BiConsumer accumulator = IntList_Accumulator; final BinaryOperator combiner = IntList_Combiner; return new CollectorImpl<>(supplier, accumulator, combiner, CH_ID); } public static Collector toIntArray() { final Supplier supplier = Suppliers.ofIntList(); final BiConsumer accumulator = IntList_Accumulator; final BinaryOperator combiner = IntList_Combiner; final Function finisher = IntArray_Finisher; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } public static Collector toLongList() { final Supplier supplier = Suppliers.ofLongList(); final BiConsumer accumulator = LongList_Accumulator; final BinaryOperator combiner = LongList_Combiner; return new CollectorImpl<>(supplier, accumulator, combiner, CH_ID); } public static Collector toLongArray() { final Supplier supplier = Suppliers.ofLongList(); final BiConsumer accumulator = LongList_Accumulator; final BinaryOperator combiner = LongList_Combiner; final Function finisher = LongArray_Finisher; return new
// NOTE(review): this chunk is truncated here, mid `toLongArray()` — the remainder of the class
// is not visible in this view.
CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } public static Collector toFloatList() { final Supplier supplier = Suppliers.ofFloatList(); final BiConsumer accumulator = FloatList_Accumulator; final BinaryOperator combiner = FloatList_Combiner; return new CollectorImpl<>(supplier, accumulator, combiner, CH_ID); } public static Collector toFloatArray() { final Supplier supplier = Suppliers.ofFloatList(); final BiConsumer accumulator = FloatList_Accumulator; final BinaryOperator combiner = FloatList_Combiner; final Function finisher = FloatArray_Finisher; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } public static Collector toDoubleList() { final Supplier supplier = Suppliers.ofDoubleList(); final BiConsumer accumulator = DoubleList_Accumulator; final BinaryOperator combiner = DoubleList_Combiner; return new CollectorImpl<>(supplier, accumulator, combiner, CH_ID); } public static Collector toDoubleArray() { final Supplier supplier = Suppliers.ofDoubleList(); final BiConsumer accumulator = DoubleList_Accumulator; final BinaryOperator combiner = DoubleList_Combiner; final Function finisher = DoubleArray_Finisher; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } /** * * @param n * @return * @throws UnsupportedOperationException it's used in parallel stream. */ public static Collector> last(final int n) { N.checkArgument(n >= 0, "'n' can't be negative"); final Supplier> supplier = new Supplier>() { private volatile boolean isCalled = false; @Override public Deque get() { if (isCalled) { throw new UnsupportedOperationException("The 'last' Collector only can be used in sequential stream"); } isCalled = true; return n <= 1024 ? 
new ArrayDeque(n) : new LinkedList(); } }; final BiConsumer, T> accumulator = new BiConsumer, T>() { @Override public void accept(Deque dqueue, T t) { if (n > 0) { if (dqueue.size() >= n) { dqueue.pollFirst(); } dqueue.offerLast(t); } } }; final BinaryOperator> combiner = new BinaryOperator>() { @Override public Deque apply(Deque a, Deque b) { if (N.notNullOrEmpty(a) && N.notNullOrEmpty(b)) { throw new UnsupportedOperationException("The 'last' Collector only can be used in sequential stream"); } while (b.size() < n && !a.isEmpty()) { b.addFirst(a.pollLast()); } return b; } }; final Function, List> finisher = new Function, List>() { @Override public List apply(Deque dqueue) { return new ArrayList<>(dqueue); } }; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } /** * Returns a {@code Collector} that concatenates the input elements into a * {@code String}, in encounter order. * * @return a {@code Collector} that concatenates the input elements into a * {@code String}, in encounter order */ public static Collector joining() { final Supplier supplier = Suppliers.ofStringBuilder(); final BiConsumer accumulator = StringBuilder_Accumulator; final BinaryOperator combiner = StringBuilder_Combiner; final Function finisher = StringBuilder_Finisher; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } /** * Returns a {@code Collector} that concatenates the input elements, * separated by the specified delimiter, in encounter order. * * @param delimiter the delimiter to be used between each element * @return A {@code Collector} which concatenates CharSequence elements, * separated by the specified delimiter, in encounter order */ public static Collector joining(CharSequence delimiter) { return joining(delimiter, "", ""); } /** * Returns a {@code Collector} that concatenates the input elements, * separated by the specified delimiter, with the specified prefix and * suffix, in encounter order. 
* * @param delimiter the delimiter to be used between each element * @param prefix the sequence of characters to be used at the beginning * of the joined result * @param suffix the sequence of characters to be used at the end * of the joined result * @return A {@code Collector} which concatenates CharSequence elements, * separated by the specified delimiter, in encounter order */ public static Collector joining(final CharSequence delimiter, final CharSequence prefix, final CharSequence suffix) { final Supplier supplier = new Supplier() { @Override public Joiner get() { return Joiner.with(delimiter, prefix, suffix); } }; final BiConsumer accumulator = Joiner_Accumulator; final BinaryOperator combiner = Joiner_Combiner; final Function finisher = Joiner_Finisher; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } /** * It's copied from StreamEx: https://github.com/amaembo/streamex *
* * Returns a {@code Collector} which filters input elements by the supplied * predicate, collecting them to the list. * *

* This method behaves like * {@code filtering(predicate, Collectors.toList())}. * *

* There are no guarantees on the type, mutability, serializability, or * thread-safety of the {@code List} returned. * * @param the type of the input elements * @param predicate a filter function to be applied to the input elements * @return a collector which applies the predicate to the input elements and * collects the elements for which predicate returned true to the * {@code List} * @see #filtering(Predicate, Collector) * @since 0.6.0 */ public static Collector> filtering(Predicate predicate) { final Collector> downstream = Collectors.toList(); return filtering(predicate, downstream); } /** * It's copied from StreamEx: https://github.com/amaembo/streamex *
* * Returns a {@code Collector} which passes only those elements to the * specified downstream collector which match given predicate. * *

* This method returns a * short-circuiting * collector if downstream collector is short-circuiting. * *

* The operation performed by the returned collector is equivalent to * {@code stream.filter(predicate).collect(downstream)}. This collector is * mostly useful as a downstream collector in cascaded operation involving * {@link #pairing(Collector, Collector, BiFunction)} collector. * *

* This method is similar to {@code Collectors.filtering} method which * appears in JDK 9. However when downstream collector is * short-circuiting * , this method will also return a short-circuiting collector. * * @param the type of the input elements * @param intermediate accumulation type of the downstream collector * @param result type of collector * @param predicate a filter function to be applied to the input elements * @param downstream a collector which will accept filtered values * @return a collector which applies the predicate to the input elements and * provides the elements for which predicate returned true to the * downstream collector * @see #pairing(Collector, Collector, BiFunction) * @since 0.4.0 */ public static Collector filtering(final Predicate predicate, final Collector downstream) { final BiConsumer downstreamAccumulator = downstream.accumulator(); final BiConsumer accumulator = new BiConsumer() { @Override public void accept(A a, T t) { if (predicate.test(t)) { downstreamAccumulator.accept(a, t); } } }; return new CollectorImpl<>(downstream.supplier(), accumulator, downstream.combiner(), downstream.finisher(), downstream.characteristics()); } public static Collector> mapping(Function mapper) { final Collector> downstream = Collectors.toList(); return Collectors.mapping(mapper, downstream); } /** * Adapts a {@code Collector} accepting elements of type {@code U} to one * accepting elements of type {@code T} by applying a mapping function to * each input element before accumulation. * * @apiNote * The {@code mapping()} collectors are most useful when used in a * multi-level reduction, such as downstream of a {@code groupingBy} or * {@code partitioningBy}. For example, given a stream of * {@code Person}, to accumulate the set of last names in each city: *

{@code
     *     Map> lastNamesByCity
     *         = people.stream().collect(groupingBy(Person::getCity,
     *                                              mapping(Person::getLastName, toSet())));
     * }
* * @param the type of the input elements * @param type of elements accepted by downstream collector * @param intermediate accumulation type of the downstream collector * @param result type of collector * @param mapper a function to be applied to the input elements * @param downstream a collector which will accept mapped values * @return a collector which applies the mapping function to the input * elements and provides the mapped results to the downstream collector */ public static Collector mapping(final Function mapper, final Collector downstream) { final BiConsumer downstreamAccumulator = downstream.accumulator(); final BiConsumer accumulator = new BiConsumer() { @Override public void accept(A a, T t) { downstreamAccumulator.accept(a, mapper.apply(t)); } }; return new CollectorImpl<>(downstream.supplier(), accumulator, downstream.combiner(), downstream.finisher(), downstream.characteristics()); } public static Collector> flatMapping(Function> mapper) { final Collector> downstream = Collectors.toList(); return flatMapping(mapper, downstream); } public static Collector flatMapping(final Function> mapper, Collector downstream) { final BiConsumer downstreamAccumulator = downstream.accumulator(); final BiConsumer accumulator = new BiConsumer() { @Override public void accept(final A a, final T t) { try (Stream stream = mapper.apply(t)) { stream.forEach(new Consumer() { @Override public void accept(U u) { downstreamAccumulator.accept(a, u); } }); } } }; return new CollectorImpl<>(downstream.supplier(), accumulator, downstream.combiner(), downstream.finisher(), downstream.characteristics()); } /** * Adapts a {@code Collector} to perform an additional finishing * transformation. For example, one could adapt the {@link #toList()} * collector to always produce an immutable list with: *
{@code
     *     List people
     *         = people.stream().collect(collectingAndThen(toList(), Collections::unmodifiableList));
     * }
* * @param the type of the input elements * @param
intermediate accumulation type of the downstream collector * @param result type of the downstream collector * @param result type of the resulting collector * @param downstream a collector * @param finisher a function to be applied to the final result of the downstream collector * @return a collector which performs the action of the downstream collector, * followed by an additional finishing step */ public static Collector collectingAndThen(final Collector downstream, final Function finisher) { Objects.requireNonNull(finisher); final Function downstreamFinisher = downstream.finisher(); final Function thenFinisher = new Function() { @Override public RR apply(A t) { return finisher.apply(downstreamFinisher.apply(t)); } }; Set characteristics = downstream.characteristics(); if (characteristics.contains(Collector.Characteristics.IDENTITY_FINISH)) { if (characteristics.size() == 1) characteristics = Collectors.CH_NOID; else { characteristics = EnumSet.copyOf(characteristics); characteristics.remove(Collector.Characteristics.IDENTITY_FINISH); characteristics = Collections.unmodifiableSet(characteristics); } } return new CollectorImpl<>(downstream.supplier(), downstream.accumulator(), downstream.combiner(), thenFinisher, characteristics); } /** * Returns a {@code Collector} which collects into the {@link List} the * input elements for which given mapper function returns distinct results. * *

* For ordered source the order of collected elements is preserved. If the * same result is returned by mapper function for several elements, only the * first element is included into the resulting list. * *

* There are no guarantees on the type, mutability, serializability, or * thread-safety of the {@code List} returned. * *

* The operation performed by the returned collector is equivalent to * {@code stream.distinct(mapper).toList()}, but may work faster. * * @param the type of the input elements * @param mapper a function which classifies input elements. * @return a collector which collects distinct elements to the {@code List}. * @since 0.3.8 */ public static Collector> distinctBy(final Function mapper) { @SuppressWarnings("rawtypes") final Supplier> supplier = (Supplier) Suppliers. ofLinkedHashMap(); final BiConsumer, T> accumulator = new BiConsumer, T>() { @Override public void accept(Map a, T t) { final Object key = mapper.apply(t); if (a.containsKey(key) == false) { a.put(key, t); } } }; final BinaryOperator> combiner = new BinaryOperator>() { @Override public Map apply(Map a, Map b) { for (Map.Entry entry : b.entrySet()) { if (a.containsKey(entry.getKey()) == false) { a.put(entry.getKey(), entry.getValue()); } } return a; } }; final Function, List> finisher = new Function, List>() { @Override public List apply(Map a) { return new ArrayList<>(a.values()); } }; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } /** * It's copied from StreamEx: https://github.com/amaembo/streamex *
* * Returns a {@code Collector} which counts a number of distinct values the * mapper function returns for the stream elements. * *

* The operation performed by the returned collector is equivalent to * {@code stream.map(mapper).distinct().count()}. This collector is mostly * useful as a downstream collector. * * @param the type of the input elements * @param mapper a function which classifies input elements. * @return a collector which counts a number of distinct classes the mapper * function returns for the stream elements. */ public static Collector distinctCount(Function mapper) { final Collector> downstream = Collectors.toSet(); final Function, Integer> finisher = new Function, Integer>() { @Override public Integer apply(Set t) { return t.size(); } }; return Collectors.collectingAndThen(Collectors.mapping(mapper, downstream), finisher); } /** * Returns a {@code Collector} accepting elements of type {@code T} that * counts the number of input elements. If no elements are present, the * result is 0. * * @implSpec * This produces a result equivalent to: *
{@code
     *     reducing(0L, e -> 1L, Long::sum)
     * }
* * @param the type of the input elements * @return a {@code Collector} that counts the input elements */ public static Collector counting() { final Function accumulator = Counting_Accumulator; final BinaryOperator combiner = Counting_Combiner; return reducing(0L, accumulator, combiner); } public static Collector countingInt() { final Function accumulator = CountingInt_Accumulator; final BinaryOperator combiner = CountingInt_Combiner; return reducing(0, accumulator, combiner); } @SuppressWarnings("rawtypes") public static Collector> min() { return minBy(Fn.naturalOrder()); } /** * Returns a {@code Collector} that produces the minimal element according * to a given {@code Comparator}, described as an {@code Nullable}. * * @implSpec * This produces a result equivalent to: *
{@code
     *     reducing(BinaryOperator.minBy(comparator))
     * }
* * @param the type of the input elements * @param comparator a {@code Comparator} for comparing elements * @return a {@code Collector} that produces the minimal value */ public static Collector> minBy(final Comparator comparator) { Objects.requireNonNull(comparator); final BinaryOperator op = new BinaryOperator() { @Override public T apply(T a, T b) { return comparator.compare(a, b) <= 0 ? a : b; } }; return reducing(op); } public static Collector minByOrGet(final Comparator comparator, final Supplier other) { Objects.requireNonNull(comparator); final BinaryOperator op = new BinaryOperator() { @Override public T apply(T a, T b) { return comparator.compare(a, b) <= 0 ? a : b; } }; return reducingOrGet(op, other); } public static Collector minByOrThrow(final Comparator comparator, final Supplier exceptionSupplier) { Objects.requireNonNull(comparator); final BinaryOperator op = new BinaryOperator() { @Override public T apply(T a, T b) { return comparator.compare(a, b) <= 0 ? a : b; } }; return reducingOrThrow(op, exceptionSupplier); } @SuppressWarnings("rawtypes") public static Collector> max() { return maxBy(Fn.naturalOrder()); } /** * Returns a {@code Collector} that produces the maximal element according * to a given {@code Comparator}, described as an {@code Nullable}. * * @implSpec * This produces a result equivalent to: *
{@code
     *     reducing(BinaryOperator.maxBy(comparator))
     * }
* * @param the type of the input elements * @param comparator a {@code Comparator} for comparing elements * @return a {@code Collector} that produces the maximal value */ public static Collector> maxBy(final Comparator comparator) { Objects.requireNonNull(comparator); final BinaryOperator op = new BinaryOperator() { @Override public T apply(T a, T b) { return comparator.compare(a, b) >= 0 ? a : b; } }; return reducing(op); } public static Collector maxByOrGet(final Comparator comparator, final Supplier other) { Objects.requireNonNull(comparator); final BinaryOperator op = new BinaryOperator() { @Override public T apply(T a, T b) { return comparator.compare(a, b) >= 0 ? a : b; } }; return reducingOrGet(op, other); } public static Collector maxByOrThrow(final Comparator comparator, final Supplier exceptionSupplier) { Objects.requireNonNull(comparator); final BinaryOperator op = new BinaryOperator() { @Override public T apply(T a, T b) { return comparator.compare(a, b) >= 0 ? a : b; } }; return reducingOrThrow(op, exceptionSupplier); } /** * It's copied from StreamEx: https://github.com/amaembo/streamex *
* Returns a {@code Collector} which aggregates the results of two supplied * collectors using the supplied finisher function. * *

* This method returns a * short-circuiting * collector if both downstream collectors are short-circuiting. The * collection might stop when both downstream collectors report that the * collection is complete. * * @param the type of the input elements * @param the intermediate accumulation type of the first collector * @param the intermediate accumulation type of the second collector * @param the result type of the first collector * @param the result type of the second collector * @param the final result type * @param c1 the first collector * @param c2 the second collector * @param finisher the function which merges two results into the single * one. * @return a {@code Collector} which aggregates the results of two supplied * collectors. */ public static Collector pairing(final Collector c1, final Collector c2, final BiFunction finisher) { final Supplier c1Supplier = c1.supplier(); final Supplier c2Supplier = c2.supplier(); final BiConsumer c1Accumulator = c1.accumulator(); final BiConsumer c2Accumulator = c2.accumulator(); final BinaryOperator c1Combiner = c1.combiner(); final BinaryOperator c2combiner = c2.combiner(); final Supplier> supplier = new Supplier>() { @Override public Pair get() { return Pair.of(c1Supplier.get(), c2Supplier.get()); } }; final BiConsumer, T> accumulator = new BiConsumer, T>() { @Override public void accept(Pair t, T u) { c1Accumulator.accept(t.left, u); c2Accumulator.accept(t.right, u); } }; final BinaryOperator> combiner = new BinaryOperator>() { @Override public Pair apply(Pair t, Pair u) { t.left = c1Combiner.apply(t.left, u.left); t.right = c2combiner.apply(t.right, u.right); return t; } }; final Function, R> resFinisher = new Function, R>() { @Override public R apply(Pair t) { final R1 r1 = c1.finisher().apply(t.left); final R2 r2 = c2.finisher().apply(t.right); return finisher.apply(r1, r2); } }; return new CollectorImpl<>(supplier, accumulator, combiner, resFinisher, CH_NOID); } /** * It's copied from StreamEx: 
https://github.com/amaembo/streamex *
* * Returns a {@code Collector} which finds the minimal and maximal element * according to the supplied comparator, then applies finisher function to * them producing the final result. * *

* This collector produces stable result for ordered stream: if several * minimal or maximal elements appear, the collector always selects the * first encountered. * *

* If there are no input elements, the finisher method is not called and * empty {@code Optional} is returned. Otherwise the finisher result is * wrapped into {@code Optional}. * * @param the type of the input elements * @param the type of the result wrapped into {@code Optional} * @param comparator comparator which is used to find minimal and maximal * element * @param finisher a {@link BiFunction} which takes minimal and maximal * element and produces the final result. * @return a {@code Collector} which finds minimal and maximal elements. */ public static Collector minMax(final Comparator comparator, final BiFunction, ? super Nullable, ? extends R> finisher) { return pairing(Collectors.minBy(comparator), Collectors.maxBy(comparator), finisher); } /** * It's copied from StreamEx: https://github.com/amaembo/streamex *
* * Returns a {@code Collector} which finds all the elements which are equal * to each other and bigger than any other element according to the * specified {@link Comparator}. The found elements are reduced using the * specified downstream {@code Collector}. * * @param the type of the input elements * @param the intermediate accumulation type of the downstream collector * @param the result type of the downstream reduction * @param comparator a {@code Comparator} to compare the elements * @param downstream a {@code Collector} implementing the downstream * reduction * @return a {@code Collector} which finds all the maximal elements. * @see #maxAll(Comparator) * @see #maxAll(Collector) * @see #maxAll() */ public static Collector maxAll(final Comparator comparator, final Collector downstream) { final Supplier downstreamSupplier = downstream.supplier(); final BiConsumer downstreamAccumulator = downstream.accumulator(); final BinaryOperator downstreamCombiner = downstream.combiner(); final MutableBoolean isCollection = MutableBoolean.of(false); final Supplier> supplier = new Supplier>() { @SuppressWarnings("rawtypes") @Override public Pair get() { final A c = downstreamSupplier.get(); if (c instanceof Collection && ((Collection) c).size() == 0) { try { ((Collection) c).clear(); isCollection.setTrue(); } catch (Throwable e) { // ignore } } return Pair.of(c, (T) none()); } }; final BiConsumer, T> accumulator = new BiConsumer, T>() { @SuppressWarnings("rawtypes") @Override public void accept(Pair t, T u) { if (t.right == NONE) { downstreamAccumulator.accept(t.left, u); t.right = u; } else { final int cmp = comparator.compare(u, t.right); if (cmp > 0) { if (isCollection.isTrue()) { ((Collection) t.left).clear(); } else { t.left = downstreamSupplier.get(); } t.right = u; } if (cmp >= 0) { downstreamAccumulator.accept(t.left, u); } } } }; final BinaryOperator> combiner = new BinaryOperator>() { @Override public Pair apply(Pair t, Pair u) { if (u.right == NONE) { return t; } 
else if (t.right == NONE) { return u; } final int cmp = comparator.compare(t.right, u.right); if (cmp > 0) { return t; } else if (cmp < 0) { return u; } t.left = downstreamCombiner.apply(t.left, u.left); return t; } }; final Function, D> finisher = new Function, D>() { @Override public D apply(Pair t) { return downstream.finisher().apply(t.left); } }; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } @SuppressWarnings("unchecked") static T none() { return (T) NONE; } /** * It's copied from StreamEx: https://github.com/amaembo/streamex *
* * Returns a {@code Collector} which finds all the elements which are equal * to each other and bigger than any other element according to the * specified {@link Comparator}. The found elements are collected to * {@link List}. * * @param the type of the input elements * @param comparator a {@code Comparator} to compare the elements * @return a {@code Collector} which finds all the maximal elements and * collects them to the {@code List}. * @see #maxAll(Comparator, Collector) * @see #maxAll() */ public static Collector> maxAll(Comparator comparator) { return maxAll(comparator, Integer.MAX_VALUE); } /** * * @param comparator * @param atMostSize * @return */ public static Collector> maxAll(final Comparator comparator, final int atMostSize) { final Supplier, T>> supplier = new Supplier, T>>() { @Override public Pair, T> get() { final List list = new ArrayList(Math.min(16, atMostSize)); return Pair.of(list, (T) NONE); } }; final BiConsumer, T>, T> accumulator = new BiConsumer, T>, T>() { @Override public void accept(Pair, T> acc, T t) { if (acc.right == NONE) { if (acc.left.size() < atMostSize) { acc.left.add(t); } acc.right = t; } else { int cmp = comparator.compare(t, acc.right); if (cmp < 0) { acc.left.clear(); acc.right = t; } if (cmp <= 0) { if (acc.left.size() < atMostSize) { acc.left.add(t); } } } } }; final BinaryOperator, T>> combiner = new BinaryOperator, T>>() { @Override public Pair, T> apply(Pair, T> acc1, Pair, T> acc2) { if (acc2.right == NONE) { return acc1; } else if (acc1.right == NONE) { return acc2; } int cmp = comparator.compare(acc1.right, acc2.right); if (cmp < 0) { return acc1; } else if (cmp > 0) { return acc2; } if (acc1.left.size() < atMostSize) { if (acc2.left.size() <= atMostSize - acc1.left.size()) { acc1.left.addAll(acc2.left); } else { acc1.left.addAll(acc2.left.subList(0, atMostSize - acc1.left.size())); } } return acc1; } }; final Function, T>, List> finisher = new Function, T>, List>() { @Override public List apply(Pair, T> acc) { 
return acc.left; } }; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } /** * It's copied from StreamEx: https://github.com/amaembo/streamex *
* * Returns a {@code Collector} which finds all the elements which are equal * to each other and bigger than any other element according to the natural * order. The found elements are reduced using the specified downstream * {@code Collector}. * * @param the type of the input elements * @param
the intermediate accumulation type of the downstream collector * @param the result type of the downstream reduction * @param downstream a {@code Collector} implementing the downstream * reduction * @return a {@code Collector} which finds all the maximal elements. * @see #maxAll(Comparator, Collector) * @see #maxAll(Comparator) * @see #maxAll() */ @SuppressWarnings("rawtypes") public static Collector maxAll(Collector downstream) { return maxAll(Fn.naturalOrder(), downstream); } /** * It's copied from StreamEx: https://github.com/amaembo/streamex *
* * Returns a {@code Collector} which finds all the elements which are equal * to each other and bigger than any other element according to the natural * order. The found elements are collected to {@link List}. * * @param the type of the input elements * @return a {@code Collector} which finds all the maximal elements and * collects them to the {@code List}. * @see #maxAll(Comparator) * @see #maxAll(Collector) */ @SuppressWarnings("rawtypes") public static Collector> maxAll() { return maxAll(Fn.naturalOrder()); } /** * It's copied from StreamEx: https://github.com/amaembo/streamex *
* * Returns a {@code Collector} which finds all the elements which are equal * to each other and smaller than any other element according to the * specified {@link Comparator}. The found elements are reduced using the * specified downstream {@code Collector}. * * @param the type of the input elements * @param
the intermediate accumulation type of the downstream collector * @param the result type of the downstream reduction * @param comparator a {@code Comparator} to compare the elements * @param downstream a {@code Collector} implementing the downstream * reduction * @return a {@code Collector} which finds all the minimal elements. * @see #minAll(Comparator) * @see #minAll(Collector) * @see #minAll() */ public static Collector minAll(Comparator comparator, Collector downstream) { return maxAll(Fn.reversedOrder(comparator), downstream); } /** * It's copied from StreamEx: https://github.com/amaembo/streamex *
* * Returns a {@code Collector} which finds all the elements which are equal * to each other and smaller than any other element according to the * specified {@link Comparator}. The found elements are collected to * {@link List}. * * @param the type of the input elements * @param comparator a {@code Comparator} to compare the elements * @return a {@code Collector} which finds all the minimal elements and * collects them to the {@code List}. * @see #minAll(Comparator, Collector) * @see #minAll() */ public static Collector> minAll(Comparator comparator) { return maxAll(Fn.reversedOrder(comparator)); } /** * * @param comparator * @param atMostSize * @return */ public static Collector> minAll(Comparator comparator, int atMostSize) { return maxAll(Fn.reversedOrder(comparator), atMostSize); } /** * It's copied from StreamEx: https://github.com/amaembo/streamex *
* * Returns a {@code Collector} which finds all the elements which are equal * to each other and smaller than any other element according to the natural * order. The found elements are reduced using the specified downstream * {@code Collector}. * * @param the type of the input elements * @param
the intermediate accumulation type of the downstream collector * @param the result type of the downstream reduction * @param downstream a {@code Collector} implementing the downstream * reduction * @return a {@code Collector} which finds all the minimal elements. * @see #minAll(Comparator, Collector) * @see #minAll(Comparator) * @see #minAll() */ @SuppressWarnings("rawtypes") public static Collector minAll(Collector downstream) { return minAll(Fn.naturalOrder(), downstream); } /** * It's copied from StreamEx: https://github.com/amaembo/streamex *
* * Returns a {@code Collector} which finds all the elements which are equal * to each other and smaller than any other element according to the natural * order. The found elements are collected to {@link List}. * * @param the type of the input elements * @return a {@code Collector} which finds all the minimal elements and * collects them to the {@code List}. * @see #minAll(Comparator) * @see #minAll(Collector) */ @SuppressWarnings("rawtypes") public static Collector> minAll() { return minAll(Fn.naturalOrder()); } /** * Returns a {@code Collector} that produces the sum of a integer-valued * function applied to the input elements. If no elements are present, * the result is 0. * * @param the type of the input elements * @param mapper a function extracting the property to be summed * @return a {@code Collector} that produces the sum of a derived property */ public static Collector summingInt(final ToIntFunction mapper) { final Supplier supplier = SummingInt_Supplier; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(int[] a, T t) { a[0] += mapper.applyAsInt(t); } }; final BinaryOperator combiner = SummingInt_Combiner; final Function finisher = SummingInt_Finisher; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } public static Collector summingInt2(final ToIntFunction mapper) { final Supplier supplier = SummingInt_Supplier_2; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(int[] a, T t) { a[0] += mapper.applyAsInt(t); } }; final BinaryOperator combiner = SummingInt_Combiner_2; final Function finisher = SummingInt_Finisher_2; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } /** * Returns a {@code Collector} that produces the sum of a long-valued * function applied to the input elements. If no elements are present, * the result is 0. 
* * @param the type of the input elements * @param mapper a function extracting the property to be summed * @return a {@code Collector} that produces the sum of a derived property */ public static Collector summingLong(final ToLongFunction mapper) { final Supplier supplier = SummingLong_Supplier; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(long[] a, T t) { a[0] += mapper.applyAsLong(t); } }; final BinaryOperator combiner = SummingLong_Combiner; final Function finisher = SummingLong_Finisher; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } public static Collector summingLong2(final ToLongFunction mapper) { final Supplier supplier = SummingLong_Supplier_2; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(long[] a, T t) { a[0] += mapper.applyAsLong(t); } }; final BinaryOperator combiner = SummingLong_Combiner_2; final Function finisher = SummingLong_Finisher_2; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } /** * Returns a {@code Collector} that produces the sum of a double-valued * function applied to the input elements. If no elements are present, * the result is 0. * *

The sum returned can vary depending upon the order in which * values are recorded, due to accumulated rounding error in * addition of values of differing magnitudes. Values sorted by increasing * absolute magnitude tend to yield more accurate results. If any recorded * value is a {@code NaN} or the sum is at any point a {@code NaN} then the * sum will be {@code NaN}. * * @param the type of the input elements * @param mapper a function extracting the property to be summed * @return a {@code Collector} that produces the sum of a derived property */ public static Collector summingDouble(final ToDoubleFunction mapper) { /* * In the arrays allocated for the collect operation, index 0 * holds the high-order bits of the running sum, index 1 holds * the low-order bits of the sum computed via compensated * summation, and index 2 holds the simple sum used to compute * the proper result if the stream contains infinite values of * the same sign. */ // return new CollectorImpl<>( // () -> new double[3], // (a, t) -> { sumWithCompensation(a, mapper.applyAsDouble(t)); // a[2] += mapper.applyAsDouble(t);}, // (a, b) -> { sumWithCompensation(a, b[0]); // a[2] += b[2]; // return sumWithCompensation(a, b[1]); }, // a -> computeFinalSum(a), // CH_NOID); final Supplier supplier = SummingDouble_Supplier; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(double[] a, T t) { final double d = mapper.applyAsDouble(t); sumWithCompensation(a, d); a[2] += d; } }; final BinaryOperator combiner = SummingDouble_Combiner; final Function finisher = SummingDouble_Finisher; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } public static Collector summingDouble2(final ToDoubleFunction mapper) { /* * In the arrays allocated for the collect operation, index 0 * holds the high-order bits of the running sum, index 1 holds * the low-order bits of the sum computed via compensated * summation, and index 2 holds the simple sum used to compute * the 
proper result if the stream contains infinite values of * the same sign. */ // return new CollectorImpl<>( // () -> new double[3], // (a, t) -> { sumWithCompensation(a, mapper.applyAsDouble(t)); // a[2] += mapper.applyAsDouble(t);}, // (a, b) -> { sumWithCompensation(a, b[0]); // a[2] += b[2]; // return sumWithCompensation(a, b[1]); }, // a -> computeFinalSum(a), // CH_NOID); final Supplier supplier = SummingDouble_Supplier_2; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(double[] a, T t) { final double d = mapper.applyAsDouble(t); sumWithCompensation(a, d); a[2] += d; a[3]++; } }; final BinaryOperator combiner = SummingDouble_Combiner_2; final Function finisher = SummingDouble_Finisher_2; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } /** * Incorporate a new double value using Kahan summation / * compensation summation. * * High-order bits of the sum are in intermediateSum[0], low-order * bits of the sum are in intermediateSum[1], any additional * elements are application-specific. * * @param intermediateSum the high-order and low-order words of the intermediate sum * @param value the name value to be included in the running sum */ static double[] sumWithCompensation(double[] intermediateSum, double value) { double tmp = value - intermediateSum[1]; double sum = intermediateSum[0]; double velvel = sum + tmp; // Little wolf of rounding error intermediateSum[1] = (velvel - sum) - tmp; intermediateSum[0] = velvel; return intermediateSum; } /** * If the compensated sum is spuriously NaN from accumulating one * or more same-signed infinite values, return the * correctly-signed infinity stored in the simple sum. 
*/ static double computeFinalSum(double[] summands) { // Better error bounds to add both terms as the final sum double tmp = summands[0] + summands[1]; double simpleSum = summands[summands.length - 1]; if (Double.isNaN(tmp) && Double.isInfinite(simpleSum)) return simpleSum; else return tmp; } /** * Returns a {@code Collector} that produces the arithmetic mean of an integer-valued * function applied to the input elements. If no elements are present, * the result is 0. * * @param the type of the input elements * @param mapper a function extracting the property to be summed * @return a {@code Collector} that produces the sum of a derived property */ public static Collector averagingInt(final ToIntFunction mapper) { final Supplier supplier = AveragingInt_Supplier; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(long[] a, T t) { a[0] += mapper.applyAsInt(t); a[1]++; } }; final BinaryOperator combiner = AveragingInt_Combiner; final Function finisher = AveragingInt_Finisher; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } public static Collector averagingInt2(final ToIntFunction mapper) { final Supplier supplier = AveragingInt_Supplier; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(long[] a, T t) { a[0] += mapper.applyAsInt(t); a[1]++; } }; final BinaryOperator combiner = AveragingInt_Combiner; final Function finisher = AveragingInt_Finisher_2; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } /** * Returns a {@code Collector} that produces the arithmetic mean of a long-valued * function applied to the input elements. If no elements are present, * the result is 0. 
* * @param the type of the input elements * @param mapper a function extracting the property to be summed * @return a {@code Collector} that produces the sum of a derived property */ public static Collector averagingLong(final ToLongFunction mapper) { final Supplier supplier = AveragingLong_Supplier; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(long[] a, T t) { a[0] += mapper.applyAsLong(t); a[1]++; } }; final BinaryOperator combiner = AveragingLong_Combiner; final Function finisher = AveragingLong_Finisher; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } public static Collector averagingLong2(final ToLongFunction mapper) { final Supplier supplier = AveragingLong_Supplier; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(long[] a, T t) { a[0] += mapper.applyAsLong(t); a[1]++; } }; final BinaryOperator combiner = AveragingLong_Combiner; final Function finisher = AveragingLong_Finisher_2; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } /** * Returns a {@code Collector} that produces the arithmetic mean of a double-valued * function applied to the input elements. If no elements are present, * the result is 0. * *

The average returned can vary depending upon the order in which * values are recorded, due to accumulated rounding error in * addition of values of differing magnitudes. Values sorted by increasing * absolute magnitude tend to yield more accurate results. If any recorded * value is a {@code NaN} or the sum is at any point a {@code NaN} then the * average will be {@code NaN}. * * @implNote The {@code double} format can represent all * consecutive integers in the range -253 to * 253. If the pipeline has more than 253 * values, the divisor in the average computation will saturate at * 253, leading to additional numerical errors. * * @param the type of the input elements * @param mapper a function extracting the property to be summed * @return a {@code Collector} that produces the sum of a derived property */ public static Collector averagingDouble(final ToDoubleFunction mapper) { /* * In the arrays allocated for the collect operation, index 0 * holds the high-order bits of the running sum, index 1 holds * the low-order bits of the sum computed via compensated * summation, and index 2 holds the number of values seen. */ // return new CollectorImpl<>( // () -> new double[4], // (a, t) -> { sumWithCompensation(a, mapper.applyAsDouble(t)); a[2]++; a[3]+= mapper.applyAsDouble(t);}, // (a, b) -> { sumWithCompensation(a, b[0]); sumWithCompensation(a, b[1]); a[2] += b[2]; a[3] += b[3]; return a; }, // a -> (a[2] == 0) ? 
0.0d : (computeFinalSum(a) / a[2]), // CH_NOID); final Supplier supplier = AveragingDouble_Supplier; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(double[] a, T t) { final double d = mapper.applyAsDouble(t); sumWithCompensation(a, d); a[2]++; a[3] += d; } }; final BinaryOperator combiner = AveragingDouble_Combiner; final Function finisher = AveragingDouble_Finisher; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } public static Collector averagingDouble2(final ToDoubleFunction mapper) { final Supplier supplier = AveragingDouble_Supplier; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(double[] a, T t) { final double d = mapper.applyAsDouble(t); sumWithCompensation(a, d); a[2]++; a[3] += d; } }; final BinaryOperator combiner = AveragingDouble_Combiner; final Function finisher = AveragingDouble_Finisher_2; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } public static Collector summarizingChar(final ToCharFunction mapper) { final Supplier supplier = SummarizingChar_Supplier; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(CharSummaryStatistics a, T t) { a.accept(mapper.applyAsChar(t)); } }; final BinaryOperator combiner = SummarizingChar_Combiner; return new CollectorImpl(supplier, accumulator, combiner, CH_ID); } public static Collector summarizingByte(final ToByteFunction mapper) { final Supplier supplier = SummarizingByte_Supplier; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(ByteSummaryStatistics a, T t) { a.accept(mapper.applyAsByte(t)); } }; final BinaryOperator combiner = SummarizingByte_Combiner; return new CollectorImpl(supplier, accumulator, combiner, CH_ID); } public static Collector summarizingShort(final ToShortFunction mapper) { final Supplier supplier = SummarizingShort_Supplier; final BiConsumer accumulator = new BiConsumer() { @Override public void 
accept(ShortSummaryStatistics a, T t) { a.accept(mapper.applyAsShort(t)); } }; final BinaryOperator combiner = SummarizingShort_Combiner; return new CollectorImpl(supplier, accumulator, combiner, CH_ID); } /** * Returns a {@code Collector} which applies an {@code int}-producing * mapping function to each input element, and returns summary statistics * for the resulting values. * * @param the type of the input elements * @param mapper a mapping function to apply to each element * @return a {@code Collector} implementing the summary-statistics reduction * * @see #summarizingDouble(ToDoubleFunction) * @see #summarizingLong(ToLongFunction) */ public static Collector summarizingInt(final ToIntFunction mapper) { final Supplier supplier = SummarizingInt_Supplier; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(IntSummaryStatistics a, T t) { a.accept(mapper.applyAsInt(t)); } }; final BinaryOperator combiner = SummarizingInt_Combiner; return new CollectorImpl(supplier, accumulator, combiner, CH_ID); } /** * Returns a {@code Collector} which applies an {@code long}-producing * mapping function to each input element, and returns summary statistics * for the resulting values. 
* * @param the type of the input elements * @param mapper the mapping function to apply to each element * @return a {@code Collector} implementing the summary-statistics reduction * * @see #summarizingDouble(ToDoubleFunction) * @see #summarizingInt(ToIntFunction) */ public static Collector summarizingLong(final ToLongFunction mapper) { final Supplier supplier = SummarizingLong_Supplier; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(LongSummaryStatistics a, T t) { a.accept(mapper.applyAsLong(t)); } }; final BinaryOperator combiner = SummarizingLong_Combiner; return new CollectorImpl(supplier, accumulator, combiner, CH_ID); } public static Collector summarizingFloat(final ToFloatFunction mapper) { final Supplier supplier = SummarizingFloat_Supplier; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(FloatSummaryStatistics a, T t) { a.accept(mapper.applyAsFloat(t)); } }; final BinaryOperator combiner = SummarizingFloat_Combiner; return new CollectorImpl(supplier, accumulator, combiner, CH_ID); } /** * Returns a {@code Collector} which applies an {@code double}-producing * mapping function to each input element, and returns summary statistics * for the resulting values. 
* * @param the type of the input elements * @param mapper a mapping function to apply to each element * @return a {@code Collector} implementing the summary-statistics reduction * * @see #summarizingLong(ToLongFunction) * @see #summarizingInt(ToIntFunction) */ public static Collector summarizingDouble(final ToDoubleFunction mapper) { final Supplier supplier = SummarizingDouble_Supplier; final BiConsumer accumulator = new BiConsumer() { @Override public void accept(DoubleSummaryStatistics a, T t) { a.accept(mapper.applyAsDouble(t)); } }; final BinaryOperator combiner = SummarizingDouble_Combiner; return new CollectorImpl(supplier, accumulator, combiner, CH_ID); } /** * Returns a {@code Collector} which performs a reduction of its * input elements under a specified {@code BinaryOperator} using the * provided identity. * * @apiNote * The {@code reducing()} collectors are most useful when used in a * multi-level reduction, downstream of {@code groupingBy} or * {@code partitioningBy}. To perform a simple reduction on a stream, * use {@link Stream#reduce(Object, BinaryOperator)}} instead. 
* * @param element type for the input and output of the reduction * @param identity the identity value for the reduction (also, the value * that is returned when there are no input elements) * @param op a {@code BinaryOperator} used to reduce the input elements * @return a {@code Collector} which implements the reduction operation * * @see #reducing(BinaryOperator) * @see #reducing(Object, Function, BinaryOperator) */ public static Collector reducing(final T identity, final BinaryOperator op) { final BiConsumer accumulator = new BiConsumer() { @Override public void accept(T[] a, T t) { a[0] = op.apply(a[0], t); } }; final BinaryOperator combiner = new BinaryOperator() { @Override public T[] apply(T[] a, T[] b) { a[0] = op.apply(a[0], b[0]); return a; } }; @SuppressWarnings("rawtypes") final Function finisher = (Function) Reducing_Finisher_0; return new CollectorImpl<>(boxSupplier(identity), accumulator, combiner, finisher, CH_NOID); } @SuppressWarnings("unchecked") private static Supplier boxSupplier(final T identity) { return new Supplier() { @Override public T[] get() { return (T[]) new Object[] { identity }; } }; } /** * Returns a {@code Collector} which performs a reduction of its * input elements under a specified {@code BinaryOperator}. The result * is described as an {@code Nullable}. * * @apiNote * The {@code reducing()} collectors are most useful when used in a * multi-level reduction, downstream of {@code groupingBy} or * {@code partitioningBy}. To perform a simple reduction on a stream, * use {@link Stream#reduce(BinaryOperator)} instead. * *

For example, given a stream of {@code Person}, to calculate tallest * person in each city: *

{@code
     *     Comparator byHeight = Comparator.comparing(Person::getHeight);
     *     Map tallestByCity
     *         = people.stream().collect(groupingBy(Person::getCity, reducing(BinaryOperator.maxBy(byHeight))));
     * }
* * @param element type for the input and output of the reduction * @param op a {@code BinaryOperator} used to reduce the input elements * @return a {@code Collector} which implements the reduction operation * * @see #reducing(Object, BinaryOperator) * @see #reducing(Object, Function, BinaryOperator) */ @SuppressWarnings("rawtypes") public static Collector> reducing(final BinaryOperator op) { final Supplier> supplier = new Supplier>() { @Override public OptionalBox get() { return new OptionalBox(op); } }; final BiConsumer, T> accumulator = (BiConsumer) Reducing_Accumulator; final BinaryOperator> combiner = (BinaryOperator) Reducing_Combiner; final Function, Nullable> finisher = (Function) Reducing_Finisher; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } private static class OptionalBox implements Consumer { BinaryOperator op = null; T value = null; boolean present = false; OptionalBox(final BinaryOperator op) { this.op = op; } @Override public void accept(T t) { if (present) { value = op.apply(value, t); } else { value = t; present = true; } } } @SuppressWarnings("rawtypes") public static Collector reducingOrGet(final BinaryOperator op, final Supplier other) { final Supplier> supplier = new Supplier>() { @Override public OptionalBox get() { return new OptionalBox(op); } }; final BiConsumer, T> accumulator = (BiConsumer) Reducing_Accumulator; final BinaryOperator> combiner = (BinaryOperator) Reducing_Combiner; final Function, T> finisher = new Function, T>() { @Override public T apply(OptionalBox a) { return a.present ? 
a.value : other.get(); } }; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } @SuppressWarnings("rawtypes") public static Collector reducingOrThrow(final BinaryOperator op, final Supplier exceptionSupplier) { final Supplier> supplier = new Supplier>() { @Override public OptionalBox get() { return new OptionalBox(op); } }; final BiConsumer, T> accumulator = (BiConsumer) Reducing_Accumulator; final BinaryOperator> combiner = (BinaryOperator) Reducing_Combiner; final Function, T> finisher = new Function, T>() { @Override public T apply(OptionalBox a) { if (a.present) { return a.value; } else { throw exceptionSupplier.get(); } } }; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } /** * Returns a {@code Collector} which performs a reduction of its * input elements under a specified mapping function and * {@code BinaryOperator}. This is a generalization of * {@link #reducing(Object, BinaryOperator)} which allows a transformation * of the elements before reduction. * * @apiNote * The {@code reducing()} collectors are most useful when used in a * multi-level reduction, downstream of {@code groupingBy} or * {@code partitioningBy}. To perform a simple map-reduce on a stream, * use {@link Stream#map(Function)} and {@link Stream#reduce(Object, BinaryOperator)} * instead. * *

For example, given a stream of {@code Person}, to calculate the longest * last name of residents in each city: *

{@code
     *     Comparator byLength = Comparator.comparing(String::length);
     *     Map longestLastNameByCity
     *         = people.stream().collect(groupingBy(Person::getCity,
     *                                              reducing(Person::getLastName, BinaryOperator.maxBy(byLength))));
     * }
* * @param the type of the input elements * @param the type of the mapped values * @param identity the identity value for the reduction (also, the value * that is returned when there are no input elements) * @param mapper a mapping function to apply to each input value * @param op a {@code BinaryOperator} used to reduce the mapped values * @return a {@code Collector} implementing the map-reduce operation * * @see #reducing(Object, BinaryOperator) * @see #reducing(BinaryOperator) */ public static Collector reducing(final U identity, final Function mapper, final BinaryOperator op) { final BiConsumer accumulator = new BiConsumer() { @Override public void accept(U[] a, T t) { a[0] = op.apply(a[0], mapper.apply(t)); } }; final BinaryOperator combiner = new BinaryOperator() { @Override public U[] apply(U[] a, U[] b) { a[0] = op.apply(a[0], b[0]); return a; } }; @SuppressWarnings("rawtypes") final Function finisher = (Function) Reducing_Finisher_0; return new CollectorImpl<>(boxSupplier(identity), accumulator, combiner, finisher, CH_NOID); } @SuppressWarnings("rawtypes") public static Collector> reducing(final Function mapper, final BinaryOperator op) { final Supplier> supplier = new Supplier>() { @Override public OptionalBox2 get() { return new OptionalBox2(mapper, op); } }; final BiConsumer, T> accumulator = (BiConsumer) Reducing_Accumulator_2; final BinaryOperator> combiner = (BinaryOperator) Reducing_Combiner_2; final Function, Nullable> finisher = (Function) Reducing_Finisher_2; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } private static class OptionalBox2 implements Consumer { Function mapper; BinaryOperator op; U value = null; boolean present = false; OptionalBox2(final Function mapper, final BinaryOperator op) { this.mapper = mapper; this.op = op; } @Override public void accept(T t) { if (present) { value = op.apply(value, mapper.apply(t)); } else { value = mapper.apply(t); present = true; } } } @SuppressWarnings("rawtypes") 
public static Collector reducingOrGet(final Function mapper, final BinaryOperator op, final Supplier other) { final Supplier> supplier = new Supplier>() { @Override public OptionalBox2 get() { return new OptionalBox2(mapper, op); } }; final BiConsumer, T> accumulator = (BiConsumer) Reducing_Accumulator_2; final BinaryOperator> combiner = (BinaryOperator) Reducing_Combiner_2; final Function, U> finisher = new Function, U>() { @Override public U apply(OptionalBox2 a) { return a.present ? a.value : other.get(); } }; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } @SuppressWarnings("rawtypes") public static Collector reducingOrThrow(final Function mapper, final BinaryOperator op, final Supplier exceptionSupplier) { final Supplier> supplier = new Supplier>() { @Override public OptionalBox2 get() { return new OptionalBox2(mapper, op); } }; final BiConsumer, T> accumulator = (BiConsumer) Reducing_Accumulator_2; final BinaryOperator> combiner = (BinaryOperator) Reducing_Combiner_2; final Function, U> finisher = new Function, U>() { @Override public U apply(OptionalBox2 a) { if (a.present) { return a.value; } else { throw exceptionSupplier.get(); } } }; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } /** * It's copied from StreamEx: https://github.com/amaembo/streamex *
* * Returns a {@code Collector} which computes a common prefix of input * {@code CharSequence} objects returning the result as {@code String}. For * empty input the empty {@code String} is returned. * *

* This method returns a * short-circuiting * collector: it may not process all the elements if the common prefix * is empty. * * @return a {@code Collector} which computes a common prefix. * @since 0.5.0 */ public static Collector commonPrefix() { final Supplier> supplier = new Supplier>() { @Override public Pair get() { return Pair.of(null, -1); } }; final BiConsumer, CharSequence> accumulator = new BiConsumer, CharSequence>() { @Override public void accept(Pair a, CharSequence t) { if (a.right == -1) { a.left = t; a.right = t.length(); } else if (a.right > 0) { if (t.length() < a.right) { a.right = t.length(); } for (int i = 0, to = a.right; i < to; i++) { if (a.left.charAt(i) != t.charAt(i)) { if (i > 0 && Character.isHighSurrogate(t.charAt(i - 1)) && (Character.isLowSurrogate(t.charAt(i)) || Character.isLowSurrogate(a.left.charAt(i)))) { i--; } a.right = i; break; } } } } }; final BinaryOperator> combiner = new BinaryOperator>() { @Override public Pair apply(Pair a, Pair b) { if (a.right == -1) { return b; } if (b.right != -1) { accumulator.accept(a, b.left.subSequence(0, b.right)); } return a; } }; final Function, String> finisher = new Function, String>() { @Override public String apply(Pair a) { return a.left == null ? "" : a.left.subSequence(0, a.right).toString(); } }; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_UNORDERED); } /** * It's copied from StreamEx: https://github.com/amaembo/streamex *
* * Returns a {@code Collector} which computes a common suffix of input * {@code CharSequence} objects returning the result as {@code String}. For * empty input the empty {@code String} is returned. * *

* The returned {@code Collector} handles specially Unicode surrogate pairs: * the returned suffix may start with * * Unicode low-surrogate code unit only if it's not preceded by * * Unicode high-surrogate code unit in any of the input sequences. * Normally the starting low-surrogate code unit is removed from the suffix. * *

* This method returns a * short-circuiting * collector: it may not process all the elements if the common suffix * is empty. * * @return a {@code Collector} which computes a common suffix. * @since 0.5.0 */ public static Collector commonSuffix() { final Supplier> supplier = new Supplier>() { @Override public Pair get() { return Pair.of(null, -1); } }; final BiConsumer, CharSequence> accumulator = new BiConsumer, CharSequence>() { @Override public void accept(Pair a, CharSequence t) { if (a.right == -1) { a.left = t; a.right = t.length(); } else if (a.right > 0) { int alen = a.left.length(); int blen = t.length(); if (blen < a.right) { a.right = blen; } for (int i = 0, to = a.right; i < to; i++) { if (a.left.charAt(alen - 1 - i) != t.charAt(blen - 1 - i)) { if (i > 0 && Character.isLowSurrogate(t.charAt(blen - i)) && (Character.isHighSurrogate(t.charAt(blen - 1 - i)) || Character.isHighSurrogate(a.left.charAt(alen - 1 - i)))) { i--; } a.right = i; break; } } } } }; final BinaryOperator> combiner = new BinaryOperator>() { @Override public Pair apply(Pair a, Pair b) { if (a.right == -1) { return b; } if (b.right != -1) { accumulator.accept(a, b.left.subSequence(b.left.length() - b.right, b.left.length())); } return a; } }; final Function, String> finisher = new Function, String>() { @Override public String apply(Pair a) { return a.left == null ? "" : a.left.subSequence(a.left.length() - a.right, a.left.length()).toString(); } }; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_UNORDERED); } /** * It's copied from StreamEx: https://github.com/amaembo/streamex *
* * * Returns a collector which collects input elements into {@code List} * removing the elements following their dominator element. The dominator * elements are defined according to given isDominator {@code BiPredicate}. * The isDominator relation must be transitive (if A dominates over B and B * dominates over C, then A also dominates over C). * *

* This operation is similar to * {@code streamEx.collapse(isDominator).toList()}. The important difference * is that in this method {@code BiPredicate} accepts not the adjacent * stream elements, but the leftmost element of the series (current * dominator) and the current element. * *

* For example, consider the stream of numbers: * *

{@code
     * StreamEx stream = StreamEx.of(1, 5, 3, 4, 2, 7);
     * }
* *

* Using {@code stream.collapse((a, b) -> a >= b).toList()} you will get the * numbers which are bigger than their immediate predecessor ( * {@code [1, 5, 4, 7]}), because (3, 4) pair is not collapsed. However * using {@code stream.collect(dominators((a, b) -> a >= b))} you will get * the numbers which are bigger than any predecessor ({@code [1, 5, 7]}) as * 5 is the dominator element for the subsequent 3, 4 and 2. * * @param type of the input elements. * @param isDominator a non-interfering, stateless, transitive * {@code BiPredicate} which returns true if the first argument is * the dominator for the second argument. * @return a collector which collects input element into {@code List} * leaving only dominator elements. * @throws UnsupportedOperationException it's used in parallel stream. * @since 0.5.1 */ public static Collector> dominators(final BiPredicate isDominator) { final Supplier> supplier = new Supplier>() { private volatile boolean isCalled = false; @Override public List get() { if (isCalled) { throw new UnsupportedOperationException("The 'dominators' Collector only can be used in sequential stream"); } isCalled = true; return new ArrayList<>(); } }; final BiConsumer, T> accumulator = new BiConsumer, T>() { @Override public void accept(List a, T t) { if (a.isEmpty() || !isDominator.test(a.get(a.size() - 1), t)) a.add(t); } }; final BinaryOperator> combiner = new BinaryOperator>() { @Override public List apply(List a, List b) { if (N.notNullOrEmpty(a) && N.notNullOrEmpty(b)) { throw new UnsupportedOperationException("The 'dominators' Collector only can be used in sequential streams"); } if (a.isEmpty()) { return b; } int i = 0, l = b.size(); T last = a.get(a.size() - 1); while (i < l && isDominator.test(last, b.get(i))) { i++; } if (i < l) { a.addAll(b.subList(i, l)); } return a; } }; return new CollectorImpl<>(supplier, accumulator, combiner, CH_ID); } /** * Returns a {@code Collector} implementing a "group by" operation on * input elements of type 
{@code T}, grouping elements according to a * classification function, and returning the results in a {@code Map}. * *

The classification function maps elements to some key type {@code K}. * The collector produces a {@code Map>} whose keys are the * values resulting from applying the classification function to the input * elements, and whose corresponding values are {@code List}s containing the * input elements which map to the associated key under the classification * function. * *

There are no guarantees on the type, mutability, serializability, or * thread-safety of the {@code Map} or {@code List} objects returned. * @implSpec * This produces a result similar to: *

{@code
     *     groupingBy(classifier, toList());
     * }
* * @implNote * The returned {@code Collector} is not concurrent. For parallel stream * pipelines, the {@code combiner} function operates by merging the keys * from one map into another, which can be an expensive operation. If * preservation of the order in which elements appear in the resulting {@code Map} * collector is not required, using {@link #groupingByConcurrent(Function)} * may offer better parallel performance. * * @param the type of the input elements * @param the type of the keys * @param classifier the classifier function mapping input elements to keys * @return a {@code Collector} implementing the group-by operation * * @see #groupingBy(Function, Collector) * @see #groupingBy(Function, Collector, Supplier) * @see #groupingByConcurrent(Function) */ public static Collector>> groupingBy(Function classifier) { final Collector> downstream = toList(); return groupingBy(classifier, downstream); } public static >> Collector groupingBy(final Function classifier, final Supplier mapFactory) { final Collector> downstream = toList(); return groupingBy(classifier, downstream, mapFactory); } /** * Returns a {@code Collector} implementing a cascaded "group by" operation * on input elements of type {@code T}, grouping elements according to a * classification function, and then performing a reduction operation on * the values associated with a given key using the specified downstream * {@code Collector}. * *

The classification function maps elements to some key type {@code K}. * The downstream collector operates on elements of type {@code T} and * produces a result of type {@code D}. The resulting collector produces a * {@code Map}. * *

There are no guarantees on the type, mutability, * serializability, or thread-safety of the {@code Map} returned. * *

For example, to compute the set of last names of people in each city: *

{@code
     *     Map> namesByCity
     *         = people.stream().collect(groupingBy(Person::getCity,
     *                                              mapping(Person::getLastName, toSet())));
     * }
* * @implNote * The returned {@code Collector} is not concurrent. For parallel stream * pipelines, the {@code combiner} function operates by merging the keys * from one map into another, which can be an expensive operation. If * preservation of the order in which elements are presented to the downstream * collector is not required, using {@link #groupingByConcurrent(Function, Collector)} * may offer better parallel performance. * * @param the type of the input elements * @param the type of the keys * @param the intermediate accumulation type of the downstream collector * @param the result type of the downstream reduction * @param classifier a classifier function mapping input elements to keys * @param downstream a {@code Collector} implementing the downstream reduction * @return a {@code Collector} implementing the cascaded group-by operation * @see #groupingBy(Function) * * @see #groupingBy(Function, Collector, Supplier) * @see #groupingByConcurrent(Function, Collector) */ public static Collector> groupingBy(final Function classifier, final Collector downstream) { final Supplier> mapFactory = Suppliers.ofMap(); return groupingBy(classifier, downstream, mapFactory); } /** * Returns a {@code Collector} implementing a cascaded "group by" operation * on input elements of type {@code T}, grouping elements according to a * classification function, and then performing a reduction operation on * the values associated with a given key using the specified downstream * {@code Collector}. The {@code Map} produced by the Collector is created * with the supplied factory function. * *

The classification function maps elements to some key type {@code K}. * The downstream collector operates on elements of type {@code T} and * produces a result of type {@code D}. The resulting collector produces a * {@code Map}. * *

For example, to compute the set of last names of people in each city, * where the city names are sorted: *

{@code
     *     Map> namesByCity
     *         = people.stream().collect(groupingBy(Person::getCity, TreeMap::new,
     *                                              mapping(Person::getLastName, toSet())));
     * }
* * @implNote * The returned {@code Collector} is not concurrent. For parallel stream * pipelines, the {@code combiner} function operates by merging the keys * from one map into another, which can be an expensive operation. If * preservation of the order in which elements are presented to the downstream * collector is not required, using {@link #groupingByConcurrent(Function, Collector, Supplier)} * may offer better parallel performance. * * @param the type of the input elements * @param the type of the keys * @param
the intermediate accumulation type of the downstream collector * @param the result type of the downstream reduction * @param the type of the resulting {@code Map} * @param classifier a classifier function mapping input elements to keys * @param downstream a {@code Collector} implementing the downstream reduction * @param mapFactory a function which, when called, produces a new empty * {@code Map} of the desired type * @return a {@code Collector} implementing the cascaded group-by operation * * @see #groupingBy(Function, Collector) * @see #groupingBy(Function) * @see #groupingByConcurrent(Function, Collector, Supplier) */ public static > Collector groupingBy(final Function classifier, final Collector downstream, final Supplier mapFactory) { // Supplier downstreamSupplier = downstream.supplier(); // BiConsumer downstreamAccumulator = downstream.accumulator(); // BiConsumer, T> accumulator = (m, t) -> { // K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); // A container = m.computeIfAbsent(key, k -> downstreamSupplier.get()); // downstreamAccumulator.accept(container, t); // }; // BinaryOperator> merger = Collectors.>mapMerger(downstream.combiner()); // @SuppressWarnings("unchecked") // Supplier> mangledFactory = (Supplier>) mapFactory; // // if (downstream.characteristics().contains(Collector.Characteristics.IDENTITY_FINISH)) { // return new CollectorImpl<>(mangledFactory, accumulator, merger, CH_ID); // } // else { // @SuppressWarnings("unchecked") // Function downstreamFinisher = (Function) downstream.finisher(); // Function, M> finisher = intermediate -> { // intermediate.replaceAll((k, v) -> downstreamFinisher.apply(v)); // @SuppressWarnings("unchecked") // M castResult = (M) intermediate; // return castResult; // }; // return new CollectorImpl<>(mangledFactory, accumulator, merger, finisher, CH_NOID); // } final Supplier downstreamSupplier = downstream.supplier(); final BiConsumer downstreamAccumulator = 
downstream.accumulator(); final Function mappingFunction = new Function() { @Override public A apply(K k) { return downstreamSupplier.get(); } }; final BiConsumer, T> accumulator = new BiConsumer, T>() { @Override public void accept(Map m, T t) { K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); A container = computeIfAbsent(m, key, mappingFunction); downstreamAccumulator.accept(container, t); } }; final BinaryOperator> combiner = Collectors.> mapMerger(downstream.combiner()); @SuppressWarnings("unchecked") final Supplier> mangledFactory = (Supplier>) mapFactory; @SuppressWarnings("unchecked") final Function downstreamFinisher = (Function) downstream.finisher(); final BiFunction function = new BiFunction() { @Override public A apply(K k, A v) { return downstreamFinisher.apply(v); } }; final Function, M> finisher = new Function, M>() { @Override public M apply(Map intermediate) { replaceAll(intermediate, function); @SuppressWarnings("unchecked") M castResult = (M) intermediate; return castResult; } }; return new CollectorImpl<>(mangledFactory, accumulator, combiner, finisher, CH_NOID); } /** * Returns a concurrent {@code Collector} implementing a "group by" * operation on input elements of type {@code T}, grouping elements * according to a classification function. * *

This is a {@link Collector.Characteristics#CONCURRENT concurrent} and * {@link Collector.Characteristics#UNORDERED unordered} Collector. * *

The classification function maps elements to some key type {@code K}. * The collector produces a {@code ConcurrentMap>} whose keys are the * values resulting from applying the classification function to the input * elements, and whose corresponding values are {@code List}s containing the * input elements which map to the associated key under the classification * function. * *

There are no guarantees on the type, mutability, or serializability * of the {@code Map} or {@code List} objects returned, or of the * thread-safety of the {@code List} objects returned. * @implSpec * This produces a result similar to: *

{@code
     *     groupingByConcurrent(classifier, toList());
     * }
* * @param the type of the input elements * @param the type of the keys * @param classifier a classifier function mapping input elements to keys * @return a concurrent, unordered {@code Collector} implementing the group-by operation * * @see #groupingBy(Function) * @see #groupingByConcurrent(Function, Collector) * @see #groupingByConcurrent(Function, Collector, Supplier) */ public static Collector>> groupingByConcurrent(Function classifier) { final Collector> downstream = toList(); return groupingByConcurrent(classifier, downstream); } public static >> Collector groupingByConcurrent(final Function classifier, final Supplier mapFactory) { final Collector> downstream = toList(); return groupingByConcurrent(classifier, downstream, mapFactory); } /** * Returns a concurrent {@code Collector} implementing a cascaded "group by" * operation on input elements of type {@code T}, grouping elements * according to a classification function, and then performing a reduction * operation on the values associated with a given key using the specified * downstream {@code Collector}. * *

This is a {@link Collector.Characteristics#CONCURRENT concurrent} and * {@link Collector.Characteristics#UNORDERED unordered} Collector. * *

The classification function maps elements to some key type {@code K}. * The downstream collector operates on elements of type {@code T} and * produces a result of type {@code D}. The resulting collector produces a * {@code Map}. * *

For example, to compute the set of last names of people in each city, * where the city names are sorted: *

{@code
     *     ConcurrentMap> namesByCity
     *         = people.stream().collect(groupingByConcurrent(Person::getCity,
     *                                                        mapping(Person::getLastName, toSet())));
     * }
* * @param the type of the input elements * @param the type of the keys * @param
the intermediate accumulation type of the downstream collector * @param the result type of the downstream reduction * @param classifier a classifier function mapping input elements to keys * @param downstream a {@code Collector} implementing the downstream reduction * @return a concurrent, unordered {@code Collector} implementing the cascaded group-by operation * * @see #groupingBy(Function, Collector) * @see #groupingByConcurrent(Function) * @see #groupingByConcurrent(Function, Collector, Supplier) */ public static Collector> groupingByConcurrent(Function classifier, Collector downstream) { final Supplier> mapFactory = Suppliers.ofConcurrentMap(); return groupingByConcurrent(classifier, downstream, mapFactory); } /** * Returns a concurrent {@code Collector} implementing a cascaded "group by" * operation on input elements of type {@code T}, grouping elements * according to a classification function, and then performing a reduction * operation on the values associated with a given key using the specified * downstream {@code Collector}. The {@code ConcurrentMap} produced by the * Collector is created with the supplied factory function. * *

This is a {@link Collector.Characteristics#CONCURRENT concurrent} and * {@link Collector.Characteristics#UNORDERED unordered} Collector. * *

The classification function maps elements to some key type {@code K}. * The downstream collector operates on elements of type {@code T} and * produces a result of type {@code D}. The resulting collector produces a * {@code Map}. * *

For example, to compute the set of last names of people in each city, * where the city names are sorted: *

{@code
     *     ConcurrentMap> namesByCity
     *         = people.stream().collect(groupingBy(Person::getCity, ConcurrentSkipListMap::new,
     *                                              mapping(Person::getLastName, toSet())));
     * }
* * * @param the type of the input elements * @param the type of the keys * @param
the intermediate accumulation type of the downstream collector * @param the result type of the downstream reduction * @param the type of the resulting {@code ConcurrentMap} * @param classifier a classifier function mapping input elements to keys * @param downstream a {@code Collector} implementing the downstream reduction * @param mapFactory a function which, when called, produces a new empty * {@code ConcurrentMap} of the desired type * @return a concurrent, unordered {@code Collector} implementing the cascaded group-by operation * * @see #groupingByConcurrent(Function) * @see #groupingByConcurrent(Function, Collector) * @see #groupingBy(Function, Collector, Supplier) */ public static > Collector groupingByConcurrent(final Function classifier, Collector downstream, final Supplier mapFactory) { // Supplier downstreamSupplier = downstream.supplier(); // BiConsumer downstreamAccumulator = downstream.accumulator(); // BinaryOperator> merger = Collectors.> mapMerger(downstream.combiner()); // @SuppressWarnings("unchecked") // Supplier> mangledFactory = (Supplier>) mapFactory; // BiConsumer, T> accumulator; // if (downstream.characteristics().contains(Collector.Characteristics.CONCURRENT)) { // accumulator = (m, t) -> { // K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); // A resultContainer = m.computeIfAbsent(key, k -> downstreamSupplier.get()); // downstreamAccumulator.accept(resultContainer, t); // }; // } else { // accumulator = (m, t) -> { // K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); // A resultContainer = m.computeIfAbsent(key, k -> downstreamSupplier.get()); // synchronized (resultContainer) { // downstreamAccumulator.accept(resultContainer, t); // } // }; // } // // if (downstream.characteristics().contains(Collector.Characteristics.IDENTITY_FINISH)) { // return new CollectorImpl<>(mangledFactory, accumulator, merger, CH_CONCURRENT_ID); // } else { // 
@SuppressWarnings("unchecked") // Function downstreamFinisher = (Function) downstream.finisher(); // Function, M> finisher = intermediate -> { // intermediate.replaceAll((k, v) -> downstreamFinisher.apply(v)); // @SuppressWarnings("unchecked") // M castResult = (M) intermediate; // return castResult; // }; // return new CollectorImpl<>(mangledFactory, accumulator, merger, finisher, CH_CONCURRENT_NOID); // } final Supplier downstreamSupplier = downstream.supplier(); final BiConsumer downstreamAccumulator = downstream.accumulator(); final Function mappingFunction = new Function() { @Override public A apply(K k) { return downstreamSupplier.get(); } }; final BiConsumer, T> accumulator = new BiConsumer, T>() { @Override public void accept(ConcurrentMap m, T t) { K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); A container = computeIfAbsent(m, key, mappingFunction); downstreamAccumulator.accept(container, t); } }; final BinaryOperator> combiner = Collectors.> mapMerger(downstream.combiner()); @SuppressWarnings("unchecked") final Supplier> mangledFactory = (Supplier>) mapFactory; @SuppressWarnings("unchecked") final Function downstreamFinisher = (Function) downstream.finisher(); final BiFunction function = new BiFunction() { @Override public A apply(K k, A v) { return downstreamFinisher.apply(v); } }; final Function, M> finisher = new Function, M>() { @Override public M apply(ConcurrentMap intermediate) { replaceAll(intermediate, function); @SuppressWarnings("unchecked") M castResult = (M) intermediate; return castResult; } }; return new CollectorImpl<>(mangledFactory, accumulator, combiner, finisher, CH_CONCURRENT_NOID); } /** * Returns a {@code Collector} which partitions the input elements according * to a {@code Predicate}, and organizes them into a * {@code Map>}. * * There are no guarantees on the type, mutability, * serializability, or thread-safety of the {@code Map} returned. 
* * @param the type of the input elements * @param predicate a predicate used for classifying input elements * @return a {@code Collector} implementing the partitioning operation * * @see #partitioningBy(Predicate, Collector) */ public static Collector>> partitioningBy(Predicate predicate) { final Collector> downstream = toList(); return partitioningBy(predicate, downstream); } /** * Returns a {@code Collector} which partitions the input elements according * to a {@code Predicate}, reduces the values in each partition according to * another {@code Collector}, and organizes them into a * {@code Map} whose values are the result of the downstream * reduction. * *

There are no guarantees on the type, mutability, * serializability, or thread-safety of the {@code Map} returned. * * @param the type of the input elements * @param the intermediate accumulation type of the downstream collector * @param the result type of the downstream reduction * @param predicate a predicate used for classifying input elements * @param downstream a {@code Collector} implementing the downstream * reduction * @return a {@code Collector} implementing the cascaded partitioning * operation * * @see #partitioningBy(Predicate) */ public static Collector> partitioningBy(final Predicate predicate, final Collector downstream) { // BiConsumer downstreamAccumulator = downstream.accumulator(); // BiConsumer, T> accumulator = (result, t) -> downstreamAccumulator.accept(predicate.test(t) ? result.forTrue : result.forFalse, t); // BinaryOperator op = downstream.combiner(); // BinaryOperator> merger = (left, right) -> new Partition<>(op.apply(left.forTrue, right.forTrue), op.apply(left.forFalse, right.forFalse)); // Supplier> supplier = () -> new Partition<>(downstream.supplier().get(), downstream.supplier().get()); // if (downstream.characteristics().contains(Collector.Characteristics.IDENTITY_FINISH)) { // return new CollectorImpl<>(supplier, accumulator, merger, CH_ID); // } else { // Function, Map> finisher = par -> new Partition<>(downstream.finisher().apply(par.forTrue), // downstream.finisher().apply(par.forFalse)); // return new CollectorImpl<>(supplier, accumulator, merger, finisher, CH_NOID); // } final Supplier> supplier = new Supplier>() { @Override public Partition get() { return new Partition<>(downstream.supplier().get(), downstream.supplier().get()); } }; final BiConsumer downstreamAccumulator = downstream.accumulator(); final BiConsumer, T> accumulator = new BiConsumer, T>() { @Override public void accept(Partition a, T t) { downstreamAccumulator.accept(predicate.test(t) ? 
a.forTrue : a.forFalse, t); } }; final BinaryOperator op = downstream.combiner(); final BinaryOperator> combiner = new BinaryOperator>() { @Override public Partition apply(Partition a, Partition b) { return new Partition<>(op.apply(a.forTrue, b.forTrue), op.apply(a.forFalse, b.forFalse)); } }; final Function, Map> finisher = new Function, Map>() { @Override public Map apply(Partition a) { return new Partition<>(downstream.finisher().apply(a.forTrue), downstream.finisher().apply(a.forFalse)); } }; return new CollectorImpl<>(supplier, accumulator, combiner, finisher, CH_NOID); } public static Collector, ?, Map> toMap() { final Function, ? extends K> keyExtractor = Fn. key(); final Function, ? extends V> valueMapper = Fn. value(); return toMap(keyExtractor, valueMapper); } public static Collector, ?, Map> toMap(final BinaryOperator mergeFunction) { final Function, ? extends K> keyExtractor = Fn. key(); final Function, ? extends V> valueMapper = Fn. value(); return toMap(keyExtractor, valueMapper, mergeFunction); } public static > Collector, ?, M> toMap(final Supplier mapFactory) { final Function, ? extends K> keyExtractor = Fn. key(); final Function, ? extends V> valueMapper = Fn. value(); return toMap(keyExtractor, valueMapper, mapFactory); } public static > Collector, ?, M> toMap(final BinaryOperator mergeFunction, final Supplier mapFactory) { final Function, ? extends K> keyExtractor = Fn. key(); final Function, ? extends V> valueMapper = Fn. value(); return toMap(keyExtractor, valueMapper, mergeFunction, mapFactory); } /** * Returns a {@code Collector} that accumulates elements into a * {@code Map} whose keys and values are the result of applying the provided * mapping functions to the input elements. * *

If the mapped keys contains duplicates (according to * {@link Object#equals(Object)}), an {@code IllegalStateException} is * thrown when the collection operation is performed. If the mapped keys * may have duplicates, use {@link #toMap(Function, Function, BinaryOperator)} * instead. * * @apiNote * It is common for either the key or the value to be the input elements. * In this case, the utility method * {@link java.util.function.Function#identity()} may be helpful. * For example, the following produces a {@code Map} mapping * students to their grade point average: *

{@code
     *     Map studentToGPA
     *         students.stream().collect(toMap(Functions.identity(),
     *                                         student -> computeGPA(student)));
     * }
* And the following produces a {@code Map} mapping a unique identifier to * students: *
{@code
     *     Map studentIdToStudent
     *         students.stream().collect(toMap(Student::getId,
     *                                         Functions.identity());
     * }
* * @implNote * The returned {@code Collector} is not concurrent. For parallel stream * pipelines, the {@code combiner} function operates by merging the keys * from one map into another, which can be an expensive operation. If it is * not required that results are inserted into the {@code Map} in encounter * order, using {@link #toConcurrentMap(Function, Function)} * may offer better parallel performance. * * @param the type of the input elements * @param the output type of the key mapping function * @param the output type of the value mapping function * @param keyExtractor a mapping function to produce keys * @param valueMapper a mapping function to produce values * @return a {@code Collector} which collects elements into a {@code Map} * whose keys and values are the result of applying mapping functions to * the input elements * * @see #toMap(Function, Function, BinaryOperator) * @see #toMap(Function, Function, BinaryOperator, Supplier) * @see #toConcurrentMap(Function, Function) */ public static Collector> toMap(Function keyExtractor, Function valueMapper) { final BinaryOperator mergeFunction = Fn.throwingMerger(); return toMap(keyExtractor, valueMapper, mergeFunction); } /** * Returns a {@code Collector} that accumulates elements into a * {@code Map} whose keys and values are the result of applying the provided * mapping functions to the input elements. * *

If the mapped * keys contains duplicates (according to {@link Object#equals(Object)}), * the value mapping function is applied to each equal element, and the * results are merged using the provided merging function. * * @apiNote * There are multiple ways to deal with collisions between multiple elements * mapping to the same key. The other forms of {@code toMap} simply use * a merge function that throws unconditionally, but you can easily write * more flexible merge policies. For example, if you have a stream * of {@code Person}, and you want to produce a "phone book" mapping name to * address, but it is possible that two persons have the same name, you can * do as follows to gracefully deals with these collisions, and produce a * {@code Map} mapping names to a concatenated list of addresses: *

{@code
     *     Map phoneBook
     *         people.stream().collect(toMap(Person::getName,
     *                                       Person::getAddress,
     *                                       (s, a) -> s + ", " + a));
     * }
* * @implNote * The returned {@code Collector} is not concurrent. For parallel stream * pipelines, the {@code combiner} function operates by merging the keys * from one map into another, which can be an expensive operation. If it is * not required that results are merged into the {@code Map} in encounter * order, using {@link #toConcurrentMap(Function, Function, BinaryOperator)} * may offer better parallel performance. * * @param the type of the input elements * @param the output type of the key mapping function * @param the output type of the value mapping function * @param keyExtractor a mapping function to produce keys * @param valueMapper a mapping function to produce values * @param mergeFunction a merge function, used to resolve collisions between * values associated with the same key, as supplied * to {@link Map#merge(Object, Object, BiFunction)} * @return a {@code Collector} which collects elements into a {@code Map} * whose keys are the result of applying a key mapping function to the input * elements, and whose values are the result of applying a value mapping * function to all input elements equal to the key and combining them * using the merge function * * @see #toMap(Function, Function) * @see #toMap(Function, Function, BinaryOperator, Supplier) * @see #toConcurrentMap(Function, Function, BinaryOperator) */ public static Collector> toMap(Function keyExtractor, Function valueMapper, BinaryOperator mergeFunction) { final Supplier> mapFactory = Suppliers. 
ofMap();

        return toMap(keyExtractor, valueMapper, mergeFunction, mapFactory);
    }

    /**
     * Returns a {@code Collector} that accumulates elements into a {@code Map}
     * created by the provided factory. Duplicate keys cause the default
     * throwing merger to raise an {@code IllegalStateException}.
     *
     * @param <T> the type of the input elements
     * @param <K> the output type of the key mapping function
     * @param <V> the output type of the value mapping function
     * @param <M> the type of the resulting {@code Map}
     * @param keyExtractor a mapping function to produce keys
     * @param valueMapper a mapping function to produce values
     * @param mapFactory a supplier of a new, empty {@code Map} into which the
     *        results will be inserted
     * @return a {@code Collector} which collects elements into a {@code Map}
     * @see #toMap(Function, Function, BinaryOperator, Supplier)
     */
    public static <T, K, V, M extends Map<K, V>> Collector<T, ?, M> toMap(final Function<? super T, ? extends K> keyExtractor,
            final Function<? super T, ? extends V> valueMapper, final Supplier<M> mapFactory) {
        final BinaryOperator<V> mergeFunction = Fn.throwingMerger();

        return toMap(keyExtractor, valueMapper, mergeFunction, mapFactory);
    }

    /**
     * Returns a {@code Collector} that accumulates elements into a
     * {@code Map} whose keys and values are the result of applying the provided
     * mapping functions to the input elements.
     *
     * <p>If the mapped keys contain duplicates (according to
     * {@link Object#equals(Object)}), the value mapping function is applied to
     * each equal element, and the results are merged using the provided merging
     * function. The {@code Map} is created by the provided supplier function.
     *
     * @implNote The returned {@code Collector} is not concurrent. For parallel
     *           stream pipelines, the {@code combiner} merges the keys from one
     *           map into another, which can be expensive. If results need not
     *           be merged in encounter order,
     *           {@link #toConcurrentMap(Function, Function, BinaryOperator, Supplier)}
     *           may offer better parallel performance.
     *
     * @param <T> the type of the input elements
     * @param <K> the output type of the key mapping function
     * @param <V> the output type of the value mapping function
     * @param <M> the type of the resulting {@code Map}
     * @param keyExtractor a mapping function to produce keys
     * @param valueMapper a mapping function to produce values
     * @param mergeFunction a merge function, used to resolve collisions between
     *        values associated with the same key, as supplied to
     *        {@link Map#merge(Object, Object, BiFunction)}
     * @param mapFactory a supplier of a new, empty {@code Map} into which the
     *        results will be inserted
     * @return a {@code Collector} which collects elements into a {@code Map}
     * @see #toMap(Function, Function)
     * @see #toMap(Function, Function, BinaryOperator)
     * @see #toConcurrentMap(Function, Function, BinaryOperator, Supplier)
     */
    public static <T, K, V, M extends Map<K, V>> Collector<T, ?, M> toMap(final Function<? super T, ? extends K> keyExtractor,
            final Function<? super T, ? extends V> valueMapper, final BinaryOperator<V> mergeFunction, final Supplier<M> mapFactory) {
        final BiConsumer<M, T> accumulator = new BiConsumer<M, T>() {
            @Override
            public void accept(M map, T element) {
                merge(map, keyExtractor.apply(element), valueMapper.apply(element), mergeFunction);
            }
        };

        final BinaryOperator<M> combiner = Collectors.<K, V, M> mapMerger(mergeFunction);

        return new CollectorImpl<>(mapFactory, accumulator, combiner, CH_ID);
    }

    /**
     * Collects {@code Map.Entry} elements into an {@code ImmutableMap};
     * duplicate keys throw {@code IllegalStateException}.
     */
    public static <K, V> Collector<Map.Entry<K, V>, ?, ImmutableMap<K, V>> toImmutableMap() {
        final Collector<Map.Entry<K, V>, ?, Map<K, V>> downstream = toMap();
        @SuppressWarnings("rawtypes")
        final Function<Map<K, V>, ImmutableMap<K, V>> finisher = (Function) ImmutableMap_Finisher;

        return collectingAndThen(downstream, finisher);
    }

    /**
     * Collects {@code Map.Entry} elements into an {@code ImmutableMap},
     * resolving duplicate keys with {@code mergeFunction}.
     */
    public static <K, V> Collector<Map.Entry<K, V>, ?, ImmutableMap<K, V>> toImmutableMap(final BinaryOperator<V> mergeFunction) {
        final Collector<Map.Entry<K, V>, ?, Map<K, V>> downstream = toMap(mergeFunction);
        @SuppressWarnings("rawtypes")
        final Function<Map<K, V>, ImmutableMap<K, V>> finisher = (Function) ImmutableMap_Finisher;

        return collectingAndThen(downstream, finisher);
    }

    /**
     * Collects elements into an {@code ImmutableMap} keyed/valued by the given
     * mapping functions; duplicate keys throw {@code IllegalStateException}.
     */
    public static <T, K, V> Collector<T, ?, ImmutableMap<K, V>> toImmutableMap(final Function<? super T, ? extends K> keyExtractor,
            final Function<? super T, ? extends V> valueMapper) {
        final Collector<T, ?, Map<K, V>> downstream = toMap(keyExtractor, valueMapper);
        @SuppressWarnings("rawtypes")
        final Function<Map<K, V>, ImmutableMap<K, V>> finisher = (Function) ImmutableMap_Finisher;

        return collectingAndThen(downstream, finisher);
    }

    /**
     * Collects elements into an {@code ImmutableMap} keyed/valued by the given
     * mapping functions, resolving duplicate keys with {@code mergeFunction}.
     */
    public static <T, K, V> Collector<T, ?, ImmutableMap<K, V>> toImmutableMap(final Function<? super T, ? extends K> keyExtractor,
            final Function<? super T, ? extends V> valueMapper, final BinaryOperator<V> mergeFunction) {
        final Collector<T, ?, Map<K, V>> downstream = toMap(keyExtractor, valueMapper, mergeFunction);
        @SuppressWarnings("rawtypes")
        final Function<Map<K, V>, ImmutableMap<K, V>> finisher = (Function) ImmutableMap_Finisher;

        return collectingAndThen(downstream, finisher);
    }

    /**
     * Collects elements into an insertion-ordered {@code LinkedHashMap};
     * duplicate keys throw {@code IllegalStateException}.
     *
     * @param keyExtractor a mapping function to produce keys
     * @param valueMapper a mapping function to produce values
     * @return a {@code Collector} backed by a {@code LinkedHashMap}
     * @see #toMap(Function, Function)
     */
    public static <T, K, V> Collector<T, ?, Map<K, V>> toLinkedHashMap(final Function<? super T, ? extends K> keyExtractor,
            final Function<? super T, ? extends V> valueMapper) {
        final BinaryOperator<V> mergeFunction = Fn.throwingMerger();

        return toLinkedHashMap(keyExtractor, valueMapper, mergeFunction);
    }

    /**
     * Collects elements into an insertion-ordered {@code LinkedHashMap},
     * resolving duplicate keys with {@code mergeFunction}.
     *
     * @param keyExtractor a mapping function to produce keys
     * @param valueMapper a mapping function to produce values
     * @param mergeFunction resolves collisions between values with equal keys
     * @return a {@code Collector} backed by a {@code LinkedHashMap}
     * @see #toMap(Function, Function, BinaryOperator)
     */
    public static <T, K, V> Collector<T, ?, Map<K, V>> toLinkedHashMap(final Function<? super T, ? extends K> keyExtractor,
            final Function<? super T, ? extends V> valueMapper, final BinaryOperator<V> mergeFunction) {
        final Supplier<Map<K, V>> mapFactory = Suppliers.ofLinkedHashMap();

        return toMap(keyExtractor, valueMapper, mergeFunction, mapFactory);
    }

    /**
     * Returns a concurrent {@code Collector} that accumulates elements into a
     * {@code ConcurrentMap} whose keys and values are the result of applying
     * the provided mapping functions to the input elements.
     *
     * <p>If the mapped keys contain duplicates (according to
     * {@link Object#equals(Object)}), an {@code IllegalStateException} is
     * thrown when the collection operation is performed. If the mapped keys
     * may have duplicates, use
     * {@link #toConcurrentMap(Function, Function, BinaryOperator)} instead.
     *
     * <p>This is a {@link Collector.Characteristics#CONCURRENT concurrent} and
     * {@link Collector.Characteristics#UNORDERED unordered} Collector.
     *
     * @param keyExtractor the mapping function to produce keys
     * @param valueMapper the mapping function to produce values
     * @return a concurrent, unordered {@code Collector} into a {@code ConcurrentMap}
     * @see #toConcurrentMap(Function, Function, BinaryOperator)
     * @see #toConcurrentMap(Function, Function, BinaryOperator, Supplier)
     */
    public static <T, K, V> Collector<T, ?, ConcurrentMap<K, V>> toConcurrentMap(final Function<? super T, ? extends K> keyExtractor,
            final Function<? super T, ? extends V> valueMapper) {
        final BinaryOperator<V> mergeFunction = Fn.throwingMerger();

        return toConcurrentMap(keyExtractor, valueMapper, mergeFunction);
    }

    /**
     * As {@link #toConcurrentMap(Function, Function)} but with a custom map
     * factory; duplicate keys throw {@code IllegalStateException}.
     */
    public static <T, K, V, M extends ConcurrentMap<K, V>> Collector<T, ?, M> toConcurrentMap(final Function<? super T, ? extends K> keyExtractor,
            final Function<? super T, ? extends V> valueMapper, final Supplier<M> mapFactory) {
        final BinaryOperator<V> mergeFunction = Fn.throwingMerger();

        return toConcurrentMap(keyExtractor, valueMapper, mergeFunction, mapFactory);
    }

    /**
     * Returns a concurrent {@code Collector} that accumulates elements into a
     * {@code ConcurrentMap}, resolving duplicate keys with the provided merge
     * function.
     *
     * <p>This is a {@link Collector.Characteristics#CONCURRENT concurrent} and
     * {@link Collector.Characteristics#UNORDERED unordered} Collector.
     *
     * @param keyExtractor a mapping function to produce keys
     * @param valueMapper a mapping function to produce values
     * @param mergeFunction resolves collisions between values with equal keys,
     *        as supplied to {@link Map#merge(Object, Object, BiFunction)}
     * @return a concurrent, unordered {@code Collector} into a {@code ConcurrentMap}
     * @see #toConcurrentMap(Function, Function)
     * @see #toConcurrentMap(Function, Function, BinaryOperator, Supplier)
     * @see #toMap(Function, Function, BinaryOperator)
     */
    public static <T, K, V> Collector<T, ?, ConcurrentMap<K, V>> toConcurrentMap(final Function<? super T, ? extends K> keyExtractor,
            final Function<? super T, ? extends V> valueMapper, final BinaryOperator<V> mergeFunction) {
        final Supplier<ConcurrentMap<K, V>> mapFactory = Suppliers.ofConcurrentMap();

        return toConcurrentMap(keyExtractor, valueMapper, mergeFunction, mapFactory);
    }

    /**
     * Returns a concurrent {@code Collector} that accumulates elements into a
     * {@code ConcurrentMap} created by the provided supplier, resolving
     * duplicate keys with the provided merge function.
     *
     * <p>This is a {@link Collector.Characteristics#CONCURRENT concurrent} and
     * {@link Collector.Characteristics#UNORDERED unordered} Collector.
     *
     * @param <M> the type of the resulting {@code ConcurrentMap}
     * @param keyExtractor a mapping function to produce keys
     * @param valueMapper a mapping function to produce values
     * @param mergeFunction resolves collisions between values with equal keys
     * @param mapFactory a supplier of a new, empty {@code ConcurrentMap}
     * @return a concurrent, unordered {@code Collector}
     * @see #toConcurrentMap(Function, Function)
     * @see #toConcurrentMap(Function, Function, BinaryOperator)
     * @see #toMap(Function, Function, BinaryOperator, Supplier)
     */
    public static <T, K, V, M extends ConcurrentMap<K, V>> Collector<T, ?, M> toConcurrentMap(final Function<? super T, ? extends K> keyExtractor,
            final Function<? super T, ? extends V> valueMapper, final BinaryOperator<V> mergeFunction, final Supplier<M> mapFactory) {
        final BiConsumer<M, T> accumulator = new BiConsumer<M, T>() {
            @Override
            public void accept(M map, T element) {
                merge(map, keyExtractor.apply(element), valueMapper.apply(element), mergeFunction);
            }
        };

        final BinaryOperator<M> combiner = Collectors.<K, V, M> concurrentMapMerger(mergeFunction);

        return new CollectorImpl<>(mapFactory, accumulator, combiner, CH_CONCURRENT_ID);
    }

    /** Collects into a {@code BiMap}; duplicate keys throw {@code IllegalStateException}. */
    public static <T, K, V> Collector<T, ?, BiMap<K, V>> toBiMap(final Function<? super T, ? extends K> keyExtractor,
            final Function<? super T, ? extends V> valueMapper) {
        final BinaryOperator<V> mergeFunction = Fn.throwingMerger();

        return toBiMap(keyExtractor, valueMapper, mergeFunction);
    }

    /** Collects into a {@code BiMap} created by {@code mapFactory}; duplicate keys throw. */
    public static <T, K, V> Collector<T, ?, BiMap<K, V>> toBiMap(final Function<? super T, ? extends K> keyExtractor,
            final Function<? super T, ? extends V> valueMapper, final Supplier<BiMap<K, V>> mapFactory) {
        final BinaryOperator<V> mergeFunction = Fn.throwingMerger();

        return toBiMap(keyExtractor, valueMapper, mergeFunction, mapFactory);
    }

    /** Collects into a {@code BiMap}, resolving duplicate keys with {@code mergeFunction}. */
    public static <T, K, V> Collector<T, ?, BiMap<K, V>> toBiMap(final Function<? super T, ? extends K> keyExtractor,
            final Function<? super T, ? extends V> valueMapper, final BinaryOperator<V> mergeFunction) {
        final Supplier<BiMap<K, V>> mapFactory = Suppliers.ofBiMap();

        return toBiMap(keyExtractor, valueMapper, mergeFunction, mapFactory);
    }

    /** Collects into a {@code BiMap} via the general {@code toMap} machinery. */
    public static <T, K, V> Collector<T, ?, BiMap<K, V>> toBiMap(final Function<? super T, ? extends K> keyExtractor,
            final Function<? super T, ? extends V> valueMapper, final BinaryOperator<V> mergeFunction, final Supplier<BiMap<K, V>> mapFactory) {
        return toMap(keyExtractor, valueMapper, mergeFunction, mapFactory);
    }

    /** Collects {@code Map.Entry} elements into a {@code ListMultimap} keyed by entry key. */
    @SuppressWarnings("rawtypes")
    public static <K, E> Collector<Map.Entry<K, E>, ?, ListMultimap<K, E>> toMultimap() {
        final Function<Map.Entry<K, E>, ? extends K> keyExtractor = (Function) Fn.key();
        final Function<Map.Entry<K, E>, ? extends E> valueMapper = (Function) Fn.value();

        return toMultimap(keyExtractor, valueMapper);
    }

    /** Collects {@code Map.Entry} elements into a {@code Multimap} created by {@code mapFactory}. */
    @SuppressWarnings("rawtypes")
    public static <K, E, V extends Collection<E>, M extends Multimap<K, E, V>> Collector<Map.Entry<K, E>, ?, M> toMultimap(final Supplier<M> mapFactory) {
        final Function<Map.Entry<K, E>, ? extends K> keyExtractor = (Function) Fn.key();
        final Function<Map.Entry<K, E>, ? extends E> valueMapper = (Function) Fn.value();

        return toMultimap(keyExtractor, valueMapper, mapFactory);
    }

    /** Collects elements into a {@code ListMultimap} keyed by {@code keyExtractor}, valued by the element itself. */
    public static <T, K> Collector<T, ?, ListMultimap<K, T>> toMultimap(final Function<? super T, ? extends K> keyExtractor) {
        final Function<T, T> valueMapper = Fn.identity();

        return toMultimap(keyExtractor, valueMapper);
    }

    /** Collects elements into a {@code Multimap} created by {@code mapFactory}, valued by the element itself. */
    public static <T, K, V extends Collection<T>, M extends Multimap<K, T, V>> Collector<T, ?, M> toMultimap(
            final Function<? super T, ? extends K> keyExtractor, final Supplier<M> mapFactory) {
        final Function<T, T> valueMapper = Fn.identity();

        return toMultimap(keyExtractor, valueMapper, mapFactory);
    }

    /** Collects elements into a {@code ListMultimap} using the given key and value mapping functions. */
    public static <T, K, E> Collector<T, ?, ListMultimap<K, E>> toMultimap(final Function<? super T, ? extends K> keyExtractor,
            final Function<? super T, ? extends E> valueMapper) {
        final Supplier<ListMultimap<K, E>> mapFactory = Suppliers.ofListMultimap();

        return toMultimap(keyExtractor, valueMapper, mapFactory);
    }

    /**
     * Collects elements into a {@code Multimap} created by {@code mapFactory},
     * using the given key and value mapping functions. Duplicate keys simply
     * accumulate additional values; no merge function is involved.
     */
    public static <T, K, E, V extends Collection<E>, M extends Multimap<K, E, V>> Collector<T, ?, M> toMultimap(
            final Function<? super T, ? extends K> keyExtractor, final Function<? super T, ? extends E> valueMapper, final Supplier<M> mapFactory) {
        final BiConsumer<M, T> accumulator = new BiConsumer<M, T>() {
            @Override
            public void accept(M map, T element) {
                map.put(keyExtractor.apply(element), valueMapper.apply(element));
            }
        };

        final BinaryOperator<M> combiner = Collectors.<K, E, V, M> multimapMerger();

        return new CollectorImpl<>(mapFactory, accumulator, combiner, CH_ID);
    }

    /** Collects elements into a {@code DataSet} with column names inferred from the elements. */
    public static <T> Collector<T, ?, DataSet> toDataSet() {
        return toDataSet(null);
    }

    /**
     * Collects elements into a {@code DataSet} with the specified column names.
     *
     * @param columnNames the column names, or {@code null} to infer them
     * @return a {@code Collector} producing a {@code DataSet}
     */
    public static <T> Collector<T, ?, DataSet> toDataSet(final List<String> columnNames) {
        @SuppressWarnings("rawtypes")
        final Collector<T, List<T>, List<T>> collector = (Collector) toList();

        final Function<List<T>, DataSet> finisher = new Function<List<T>, DataSet>() {
            @Override
            public DataSet apply(List<T> t) {
                return N.newDataSet(columnNames, t);
            }
        };

        return new CollectorImpl<T, List<T>, DataSet>(collector.supplier(), collector.accumulator(), collector.combiner(), finisher, CH_NOID);
    }

    /**
     * Replaces each value of {@code map} with the result of applying
     * {@code function} to its key and current value, mirroring
     * {@link Map#replaceAll(BiFunction)} for maps that may not support it.
     *
     * @throws ConcurrentModificationException if an entry disappears while
     *         being read or written (detected via {@code IllegalStateException})
     */
    static <K, V> void replaceAll(Map<K, V> map, BiFunction<? super K, ? super V, ? extends V> function) {
        Objects.requireNonNull(function);

        for (Map.Entry<K, V> entry : map.entrySet()) {
            K k;
            V v;
            try {
                k = entry.getKey();
                v = entry.getValue();
            } catch (IllegalStateException ise) {
                // this usually means the entry is no longer in the map.
                throw new ConcurrentModificationException(ise);
            }

            // ise thrown from function is not a cme.
            v = function.apply(k, v);

            try {
                entry.setValue(v);
            } catch (IllegalStateException ise) {
                // this usually means the entry is no longer in the map.
                throw new ConcurrentModificationException(ise);
            }
        }
    }

    /**
     * Non-atomic equivalent of {@link Map#computeIfAbsent(Object, Function)}:
     * if {@code key} has no non-null value, computes one and stores it
     * (only when non-null). Returns the value now associated with the key,
     * or {@code null} if the mapping function returned {@code null}.
     */
    private static <K, V> V computeIfAbsent(Map<K, V> map, K key, Function<? super K, ? extends V> mappingFunction) {
        Objects.requireNonNull(mappingFunction);
        V v = null;

        if ((v = map.get(key)) == null) {
            V newValue = null;

            if ((newValue = mappingFunction.apply(key)) != null) {
                map.put(key, newValue);
                return newValue;
            }
        }

        return v;
    }

    /**
     * {@code BinaryOperator<Map>} that merges the contents of its right
     * argument into its left argument, using the provided merge function to
     * handle duplicate keys. Unlike {@link Map#merge(Object, Object, BiFunction)},
     * a key mapped to {@code null} in the left map is still passed through the
     * merge function (distinguished from "absent" via {@code containsKey}).
     *
     * @param <K> type of the map keys
     * @param <V> type of the map values
     * @param <M> type of the map
     * @param mergeFunction a merge function suitable for
     *        {@link Map#merge(Object, Object, BiFunction) Map.merge()}
     * @return a merge function for two maps
     */
    private static <K, V, M extends Map<K, V>> BinaryOperator<M> mapMerger(final BinaryOperator<V> mergeFunction) {
        Objects.requireNonNull(mergeFunction);

        return new BinaryOperator<M>() {
            @Override
            public M apply(M m1, M m2) {
                for (Map.Entry<K, V> e : m2.entrySet()) {
                    final V oldValue = m1.get(e.getKey());

                    if (oldValue == null && m1.containsKey(e.getKey()) == false) {
                        m1.put(e.getKey(), e.getValue());
                    } else {
                        m1.put(e.getKey(), mergeFunction.apply(oldValue, e.getValue()));
                    }
                }

                return m1;
            }
        };
    }

    /** As {@link #mapMerger(BinaryOperator)} but typed for {@code ConcurrentMap} targets. */
    private static <K, V, M extends ConcurrentMap<K, V>> BinaryOperator<M> concurrentMapMerger(final BinaryOperator<V> mergeFunction) {
        Objects.requireNonNull(mergeFunction);

        return new BinaryOperator<M>() {
            @Override
            public M apply(M m1, M m2) {
                for (Map.Entry<K, V> e : m2.entrySet()) {
                    final V oldValue = m1.get(e.getKey());

                    if (oldValue == null && m1.containsKey(e.getKey()) == false) {
                        m1.put(e.getKey(), e.getValue());
                    } else {
                        m1.put(e.getKey(), mergeFunction.apply(oldValue, e.getValue()));
                    }
                }

                return m1;
            }
        };
    }

    /**
     * {@code BinaryOperator<Multimap>} that merges the right multimap into the
     * left one: new keys are copied wholesale via {@code putAll}; existing
     * keys have the right-hand values appended to the left-hand collection.
     * Empty value collections on the right are skipped.
     */
    private static <K, E, V extends Collection<E>, M extends Multimap<K, E, V>> BinaryOperator<M> multimapMerger() {
        return new BinaryOperator<M>() {
            @Override
            public M apply(M m1, M m2) {
                K key = null;
                V value = null;

                for (Map.Entry<K, V> e : m2.entrySet()) {
                    Objects.requireNonNull(e.getValue());
                    key = e.getKey();
                    value = e.getValue();

                    if (N.notNullOrEmpty(value)) {
                        V oldValue = m1.get(key);

                        if (oldValue == null) {
                            m1.putAll(key, value);
                        } else {
                            oldValue.addAll(value);
                        }
                    }
                }

                return m1;
            }
        };
    }

    /**
     * Non-atomic variant of {@link Map#merge(Object, Object, BiFunction)} used
     * by the accumulators above: stores {@code value} when {@code key} is
     * absent, otherwise stores {@code remappingFunction.apply(oldValue, value)}.
     * A key explicitly mapped to {@code null} counts as present.
     */
    static <K, V> void merge(Map<K, V> map, K key, V value, BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
        Objects.requireNonNull(remappingFunction);

        final V oldValue = map.get(key);

        if (oldValue == null && map.containsKey(key) == false) {
            map.put(key, value);
        } else {
            map.put(key, remappingFunction.apply(oldValue, value));
        }
    }

    /**
     * Implementation class used by partitioningBy: a fixed two-entry map from
     * {@code false}/{@code true} to the two partition results.
     */
    private static final class Partition<T> extends AbstractMap<Boolean, T> implements Map<Boolean, T> {
        final T forTrue;
        final T forFalse;

        Partition(T forTrue, T forFalse) {
            this.forTrue = forTrue;
            this.forFalse = forFalse;
        }

        @Override
        public Set<Map.Entry<Boolean, T>> entrySet() {
            return new AbstractSet<Map.Entry<Boolean, T>>() {
                @Override
                public Iterator<Map.Entry<Boolean, T>> iterator() {
                    Map.Entry<Boolean, T> falseEntry = new SimpleImmutableEntry<>(false, forFalse);
                    Map.Entry<Boolean, T> trueEntry = new SimpleImmutableEntry<>(true, forTrue);

                    return Arrays.asList(falseEntry, trueEntry).iterator();
                }

                @Override
                public int size() {
                    return 2;
                }
            };
        }
    }

    /** Namespace alias exposing the same static factories; never instantiated. */
    public static final class MoreCollectors extends Collectors {
        private MoreCollectors() {
            // singleton
        }
    }
}









 * The returned {@code Collector} treats Unicode surrogate pairs specially:
 * the returned prefix may end with a Unicode high-surrogate code unit only
 * if it is not followed by a Unicode low-surrogate code unit in any of the
 * input sequences; otherwise the trailing high-surrogate code unit is
 * removed from the prefix.
 *