
/*
 * Copyright (C) 2016, 2017, 2018, 2019 HaiYang Li
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package com.landawn.abacus.util.stream;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Reader;
import java.io.Writer;
import java.nio.charset.Charset;
import java.nio.file.Path;
import java.security.SecureRandom;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.BiPredicate;
import java.util.function.BinaryOperator;
import java.util.function.BooleanSupplier;
import java.util.function.Consumer;
import java.util.function.DoubleConsumer;
import java.util.function.Function;
import java.util.function.IntBinaryOperator;
import java.util.function.IntConsumer;
import java.util.function.IntFunction;
import java.util.function.LongConsumer;
import java.util.function.LongFunction;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.function.ToDoubleFunction;
import java.util.function.ToIntFunction;
import java.util.function.ToLongFunction;
import java.util.function.UnaryOperator;
import java.util.regex.Pattern;
import java.util.stream.Collector;

import com.landawn.abacus.annotation.Beta;
import com.landawn.abacus.annotation.IntermediateOp;
import com.landawn.abacus.annotation.LazyEvaluation;
import com.landawn.abacus.annotation.ParallelSupported;
import com.landawn.abacus.annotation.SequentialOnly;
import com.landawn.abacus.annotation.TerminalOp;
import com.landawn.abacus.annotation.TerminalOpTriggered;
import com.landawn.abacus.util.Array;
import com.landawn.abacus.util.AsyncExecutor;
import com.landawn.abacus.util.ByteIterator;
import com.landawn.abacus.util.CharIterator;
import com.landawn.abacus.util.Charsets;
import com.landawn.abacus.util.Comparators;
import com.landawn.abacus.util.ContinuableFuture;
import com.landawn.abacus.util.DataSet;
import com.landawn.abacus.util.Difference;
import com.landawn.abacus.util.DoubleIterator;
import com.landawn.abacus.util.Duration;
import com.landawn.abacus.util.FloatIterator;
import com.landawn.abacus.util.Fn;
import com.landawn.abacus.util.Fn.BiConsumers;
import com.landawn.abacus.util.Fn.Suppliers;
import com.landawn.abacus.util.Holder;
import com.landawn.abacus.util.IOUtil;
import com.landawn.abacus.util.ImmutableMap;
import com.landawn.abacus.util.Indexed;
import com.landawn.abacus.util.IntIterator;
import com.landawn.abacus.util.IntList;
import com.landawn.abacus.util.InternalUtil;
import com.landawn.abacus.util.Iterables;
import com.landawn.abacus.util.Iterators;
import com.landawn.abacus.util.LineIterator;
import com.landawn.abacus.util.ListMultimap;
import com.landawn.abacus.util.LongIterator;
import com.landawn.abacus.util.MergeResult;
import com.landawn.abacus.util.Multimap;
import com.landawn.abacus.util.MutableBoolean;
import com.landawn.abacus.util.MutableInt;
import com.landawn.abacus.util.MutableLong;
import com.landawn.abacus.util.N;
import com.landawn.abacus.util.NoCachingNoUpdating;
import com.landawn.abacus.util.ObjIterator;
import com.landawn.abacus.util.Pair;
import com.landawn.abacus.util.Percentage;
import com.landawn.abacus.util.ShortIterator;
import com.landawn.abacus.util.Splitter;
import com.landawn.abacus.util.Throwables;
import com.landawn.abacus.util.cs;
import com.landawn.abacus.util.u.Optional;
import com.landawn.abacus.util.u.OptionalDouble;
import com.landawn.abacus.util.u.OptionalInt;
import com.landawn.abacus.util.u.OptionalLong;
import com.landawn.abacus.util.function.ByteBiFunction;
import com.landawn.abacus.util.function.ByteNFunction;
import com.landawn.abacus.util.function.ByteTriFunction;
import com.landawn.abacus.util.function.CharBiFunction;
import com.landawn.abacus.util.function.CharNFunction;
import com.landawn.abacus.util.function.CharTriFunction;
import com.landawn.abacus.util.function.DoubleBiFunction;
import com.landawn.abacus.util.function.DoubleNFunction;
import com.landawn.abacus.util.function.DoubleTriFunction;
import com.landawn.abacus.util.function.FloatBiFunction;
import com.landawn.abacus.util.function.FloatNFunction;
import com.landawn.abacus.util.function.FloatTriFunction;
import com.landawn.abacus.util.function.IntBiFunction;
import com.landawn.abacus.util.function.IntNFunction;
import com.landawn.abacus.util.function.IntTriFunction;
import com.landawn.abacus.util.function.LongBiFunction;
import com.landawn.abacus.util.function.LongNFunction;
import com.landawn.abacus.util.function.LongTriFunction;
import com.landawn.abacus.util.function.ShortBiFunction;
import com.landawn.abacus.util.function.ShortNFunction;
import com.landawn.abacus.util.function.ShortTriFunction;
import com.landawn.abacus.util.function.ToByteFunction;
import com.landawn.abacus.util.function.ToCharFunction;
import com.landawn.abacus.util.function.ToFloatFunction;
import com.landawn.abacus.util.function.ToShortFunction;
import com.landawn.abacus.util.function.TriFunction;
import com.landawn.abacus.util.function.TriPredicate;
import com.landawn.abacus.util.stream.ObjIteratorEx.BufferedIterator;

// Tested performance with the code below. It seems there is no meaningful performance improvement from Seq (CheckedStream) compared with Stream.
// Remove the CheckedStream for now???
//    @Test
//    public void test_try_catch_perf() {
//        final int len = 1000_000;
//        final int loopNum = 100;
//
//        Profiler.run(1, loopNum, 3, "noTryCatch", () -> {
//            final long count = Stream.range(0, len).map(it -> notThrowSQLException()).count();
//
//            assertEquals(len, count);
//        }).printResult();
//
//        Profiler.run(1, loopNum, 3, "cmdWithTryCatch", () -> {
//            final long count = Stream.range(0, len).map(Fn.ff(it -> maybeThrowSQLException())).count();
//
//            assertEquals(len, count);
//        }).printResult();
//
//        Profiler.run(1, loopNum, 3, "cmdByCheckedStream", () -> {
//            try {
//                final long count = CheckedStream. range(0, len).map(it -> maybeThrowSQLException()).count();
//                assertEquals(len, count);
//            } catch (final SQLException e) {
//                throw ExceptionUtil.toRuntimeException(e, true);
//            }
//        }).printResult();
//
//    }
//
//    @SuppressWarnings("unused")
//    String maybeThrowSQLException() throws SQLException {
//        return "abc"; // Strings.uuid();
//    }
//
//    String notThrowSQLException() {
//        return "abc"; // Strings.uuid();
//    }
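//
//    // For reference, a minimal sketch of the Fn.ff(...) usage measured above: Fn.ff wraps a mapper that
//    // throws a checked exception (here the maybeThrowSQLException() helper above) so it can be passed to
//    // map(...) without an explicit try/catch in the pipeline:
//    //
//    //     Stream.range(0, 3).map(Fn.ff(it -> maybeThrowSQLException())).forEach(System.out::println);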

/**
 * Note: This class includes codes copied from StreamEx under Apache License, version 2.0.
 *
 * <p>
 * The Stream class is an abstract class that represents a stream of elements and supports different kinds of computations.
 * The Stream operations are divided into intermediate and terminal operations, and are combined to form stream pipelines.
 * </p>
 *
 * <p>
 * The Stream will be automatically closed after a terminal method is called/triggered.
 * </p>
 *
 * <p>
 * Refer to {@code com.landawn.abacus.util.stream.BaseStream}.
 * </p>
 *
 * @param <T> the type of the stream elements
 * @see com.landawn.abacus.util.stream.BaseStream
 * @see com.landawn.abacus.util.stream.EntryStream
 * @see com.landawn.abacus.util.stream.IntStream
 * @see com.landawn.abacus.util.stream.LongStream
 * @see com.landawn.abacus.util.stream.DoubleStream
 * @see com.landawn.abacus.util.stream.Collectors
 * @see com.landawn.abacus.util.Fn
 * @see com.landawn.abacus.util.Comparators
 * @see com.landawn.abacus.util.Seq
 */
@com.landawn.abacus.annotation.Immutable
@LazyEvaluation
@SuppressWarnings({ "java:S1192", "java:S1845" })
public abstract class Stream<T>
        extends StreamBase<T, Object[], Predicate<? super T>, Consumer<? super T>, List<T>, Optional<T>, Indexed<T>, ObjIterator<T>, Stream<T>> {

    static final Random RAND = new SecureRandom();

    private static final Splitter lineSplitter = Splitter.forLines();
    private static final Splitter trimLineSplitter = Splitter.forLines().trimResults();
    private static final Splitter omitEmptyLinesLineSplitter = Splitter.forLines().omitEmptyStrings();
    private static final Splitter trimAndOmitEmptyLinesLineSplitter = Splitter.forLines().trimResults().omitEmptyStrings();

    Stream(final boolean sorted, final Comparator<? super T> cmp, final Collection closeHandlers) {
        super(sorted, cmp, closeHandlers);
    }

    /**
     * Returns a stream consisting of the elements of this stream that match the given predicate.
     *
     * @param predicate the condition to test the elements of the stream
     * @return a new Stream consisting of the elements that match the given predicate
     */
    @ParallelSupported
    @IntermediateOp
    @Override
    public abstract Stream<T> filter(Predicate<? super T> predicate);

    /**
     * Returns a stream consisting of the elements of this stream that match the given predicate.
     * If an element does not match the predicate, the provided action {@code actionOnDroppedItem} is applied to that element.
     *
     * This is an intermediate operation.
     *
     * @param predicate the condition to test the elements of the stream
     * @param actionOnDroppedItem the action to perform on the elements that do not match the predicate.
     *        This action is only applied to the elements that do not match the predicate and are pulled by a downstream/terminal operation.
     * @return a new Stream consisting of the elements that match the given predicate
     */
    @Beta
    @ParallelSupported
    @IntermediateOp
    @Override
    public abstract Stream<T> filter(Predicate<? super T> predicate, Consumer<? super T> actionOnDroppedItem);

    /**
     * Keeps the elements until the given predicate returns {@code false}.
     * The stream should be sorted, which means that if {@code predicate.test(x)} returns {@code false} for some element x, then {@code predicate.test(y)} should also return {@code false} for every element y behind x.
     *
     * In parallel Streams, the elements after the first element for which {@code predicate} returns {@code false} may be tested by the predicate too.
     *
     * For example:
     * <pre>
     * <code>
     * // For sequential stream:
     * Stream.of(1, 2, 3, 4, 5, 6).takeWhile(it -> it < 5).toList() ===> [1, 2, 3, 4]
     * Stream.of(1, 2, 5, 6, 3, 4).takeWhile(it -> it < 5).toList() ===> [1, 2]
     * Stream.of(5, 6, 1, 2, 3, 4).takeWhile(it -> it < 5).toList() ===> []
     *
     *
     * // For parallel stream:
     * Stream.of(1, 2, 3, 4, 5, 6).parallel().takeWhile(it -> it < 5).toList() ===> [1, 2, 3, 4] // Order could be different since it's in parallel stream.
     * Stream.of(1, 2, 5, 6, 3, 4).parallel().takeWhile(it -> it < 5).toList() ===> [1, 2] // or [1, 2, 3] or [1, 2, 3, 4] // Order could be different since it's in parallel stream.
     * Stream.of(5, 6, 1, 2, 3, 4).parallel().takeWhile(it -> it < 5).toList() ===> any sub set of [1, 2, 3, 4], including [] // Order could be different since it's in parallel stream.
     * </code>
     * </pre>
     *
     * @param predicate
     * @return
     */
    @ParallelSupported
    @IntermediateOp
    @Override
    public abstract Stream<T> takeWhile(Predicate<? super T> predicate);

    /**
     * Removes the elements until the given predicate returns {@code false}.
     * The stream should be sorted, which means that the elements for which {@code predicate.test} returns {@code true} should all come before the elements for which it returns {@code false}.
     *
     * In parallel Streams, the elements after the first element for which {@code predicate} returns {@code false} may be tested by the predicate too.
     *
     * For example:
     * <pre>
     * <code>
     * // For sequential stream:
     * Stream.of(1, 2, 3, 4, 5, 6).dropWhile(it -> it < 4).toList() ===> [4, 5, 6]
     * Stream.of(1, 2, 5, 6, 3, 4).dropWhile(it -> it < 4).toList() ===> [5, 6, 3, 4]
     * Stream.of(5, 6, 1, 2, 3, 4).dropWhile(it -> it < 4).toList() ===> [5, 6, 1, 2, 3, 4]
     *
     *
     * // For parallel stream:
     * Stream.of(1, 2, 3, 4, 5, 6).parallel().dropWhile(it -> it < 4).toList() ===> [4, 5, 6] // Order could be different since it's in parallel stream.
     * Stream.of(1, 2, 5, 6, 3, 4).parallel().dropWhile(it -> it < 4).toList() ===> [5, 6, 4] // or [5, 6, 3, 4] // Order could be different since it's in parallel stream.
     * Stream.of(5, 6, 1, 2, 3, 4).parallel().dropWhile(it -> it < 4).toList() ===> [5, 6] + any sub set of [1, 2, 3, 4] // Order could be different since it's in parallel stream.
     * </code>
     * </pre>
     *
     * @param predicate
     * @return
     */
    @ParallelSupported
    @IntermediateOp
    @Override
    public abstract Stream<T> dropWhile(Predicate<? super T> predicate);

    /**
     * Removes the elements of this stream until the given predicate returns {@code false}.
     * The stream should be sorted, which means that the elements for which {@code predicate.test} returns {@code true} should all come before the elements for which it returns {@code false}.
     *
     * In parallel Streams, the elements after the first element for which {@code predicate} returns {@code false} may be tested by the predicate too.
     *
     * @param predicate the condition to test the elements of the stream
     * @param actionOnDroppedItem the action to perform on the dropped elements.
     *        This action is only applied to the dropped elements that are pulled by a downstream/terminal operation.
     * @return a new Stream consisting of the remaining elements after the leading elements have been removed
     */
    @Beta
    @ParallelSupported
    @IntermediateOp
    @Override
    public abstract Stream<T> dropWhile(Predicate<? super T> predicate, Consumer<? super T> actionOnDroppedItem);

    /**
     * Skips elements in the stream until the given predicate returns {@code true}.
     * The stream should be sorted, which means that if {@code predicate.test(x)} returns {@code true} for some element x, then {@code predicate.test(y)} should also return {@code true} for every element y behind x.
     *
     * In parallel Streams, the elements after the first element for which {@code predicate} returns {@code true} may be tested by the predicate too.
     *
     * @param predicate the condition to test the elements of the stream
     * @return a new Stream consisting of the remaining elements after the leading elements that do not match the predicate have been skipped
     * @see #dropWhile(Predicate)
     */
    @Beta
    @ParallelSupported
    @IntermediateOp
    @Override
    public abstract Stream<T> skipUntil(Predicate<? super T> predicate);

    /**
     * Performs the given action on the elements pulled by a downstream/terminal operation.
     * This is an intermediate operation.
     *
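     * For example, a small sketch:
     * <pre>
     * <code>
     * // print each element as it is pulled by the terminal operation toList()
     * Stream.of(1, 2, 3).onEach(System.out::println).toList(); // prints 1, 2, 3 and returns [1, 2, 3]
     * </code>
     * </pre>
     *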
* Same as {@code peek}. * * @param action The action to be performed on the elements pulled by downstream/terminal operation * @return A new Stream consisting of the elements of this stream with the provided action applied to each element. * @see #peek(Consumer) */ @Beta @ParallelSupported @IntermediateOp @Override public abstract Stream onEach(Consumer action); /** * Performs the provided action on the elements pulled by downstream/terminal operation. Mostly it's used for debugging * This is an intermediate operation. * * @param action The action to be performed on the elements pulled by downstream/terminal operation * @return A new Stream consisting of the elements of this stream with the provided action applied to each element. * @see #onEach(Consumer) */ @ParallelSupported @IntermediateOp @Override public Stream peek(final Consumer action) { return onEach(action); } /** * Selects the elements that belong to the specified {@code targetType}, including its subtypes. * This is an intermediate operation. * * @param the type of the elements to be selected * @param targetType the class of the type to be selected * @return a new Stream containing elements of the specified type */ @SequentialOnly @IntermediateOp public abstract Stream select(final Class targetType); /** * Pairs each element in the stream with the result of applying the provided function to that element. * This is an intermediate operation. * * @param The type of the paired value. * @param extractor The function to be applied to each element in the stream. * @return A new Stream of Pairs, where each Pair consists of an element from the original stream and its corresponding value obtained by applying the extractor function. */ @ParallelSupported @IntermediateOp public abstract Stream> pairWith(final Function extractor); /** * Transforms the elements in the stream by applying a function to each element. * This is an intermediate operation. * * @param The type of the result elements. * @param mapper The function to be applied to each element in the stream. * @return A new Stream consisting of the results of applying the given function to the elements of this stream. */ @ParallelSupported @IntermediateOp public abstract Stream map(final Function mapper); /** * Transforms the elements in the stream by applying a function to each element if it's not {@code null}, otherwise skips it. * This is an intermediate operation. * *
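     * For example, a small sketch:
     * <pre>
     * <code>
     * // null elements are skipped, the remaining elements are mapped
     * Stream.of("a", null, "b").mapIfNotNull(s -> s.toUpperCase()).toList(); // => [A, B]
     * </code>
     * </pre>
     *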
* @implSpec it's equivalent to {@code skipNulls().map(mapper)}. * * @param the type of the result elements * @param mapper the function to be applied to each element in the stream * @return a new Stream consisting of the results of applying the given function to the elements of this stream, skipping {@code null} elements */ @Beta @ParallelSupported @IntermediateOp public abstract Stream mapIfNotNull(final Function mapper); /** * Applies a sliding map operation on the stream. * It takes a BiFunction as an argument which is applied to each pair of consecutive elements in the stream. * {@code Null} will be passed as the second argument to the BiFunction for last non-paired element(s) in the stream. * The result is a new stream consisting of the results of applying the BiFunction. * * @param the type of the output stream * @param mapper a BiFunction that takes two consecutive elements and produces a new element of type R * @return a new Stream consisting of the results of applying the BiFunction to each pair of consecutive elements */ @ParallelSupported @IntermediateOp public abstract Stream slidingMap(BiFunction mapper); /** * Applies a sliding map operation on the stream with a specified increment. * It takes an integer as the increment and a BiFunction as an argument which is applied to each pair of consecutive elements in the stream. * {@code Null} will be passed as the second argument to the BiFunction for last non-paired element(s) in the stream. * The result is a new stream consisting of the results of applying the BiFunction. * * @param the type of the output stream * @param increment the step size for sliding over the elements in the stream * @param mapper a BiFunction that takes two consecutive elements and produces a new element of type R * @return a new Stream consisting of the results of applying the BiFunction to each pair of consecutive elements */ @ParallelSupported @IntermediateOp public abstract Stream slidingMap(int increment, BiFunction mapper); /** * Applies a sliding map operation on the stream with a specified increment and a flag to ignore unpaired elements. * It takes an integer as the increment, a boolean to ignore unpaired elements, and a BiFunction as an argument which is applied to each pair of consecutive elements in the stream. * {@code Null} will be passed as the second argument to the BiFunction for last non-paired element(s) in the stream if the {@code ignoreNotPaired} flag is set to {@code false}. * The result is a new stream consisting of the results of applying the BiFunction. * * @param the type of the output stream * @param increment the step size for sliding over the elements in the stream * @param ignoreNotPaired a flag to indicate whether to ignore unpaired elements in the stream * @param mapper a BiFunction that takes two consecutive elements and produces a new element of type R * @return a new Stream consisting of the results of applying the BiFunction to each pair of consecutive elements */ @ParallelSupported @IntermediateOp public abstract Stream slidingMap(int increment, boolean ignoreNotPaired, BiFunction mapper); /** * Applies a sliding map operation on the stream. * It takes a TriFunction as an argument which is applied to each triplet of consecutive elements in the stream. * {@code Nulls} will be passed as the second and third arguments to the TriFunction for last non-paired element(s) in the stream. * The result is a new stream consisting of the results of applying the TriFunction. 
* * @param the type of the output stream * @param mapper a TriFunction that takes three consecutive elements and produces a new element of type R * @return a new Stream consisting of the results of applying the TriFunction to each triplet of consecutive elements */ @ParallelSupported @IntermediateOp public abstract Stream slidingMap(TriFunction mapper); /** * Applies a sliding map operation on the stream with a specified increment. * It takes an integer as the increment and a TriFunction as an argument which is applied to each triplet of consecutive elements in the stream. * {@code Nulls} will be passed as the second and third arguments to the TriFunction for last non-paired element(s) in the stream. * The result is a new stream consisting of the results of applying the TriFunction. * * @param the type of the output stream * @param increment the step size for sliding over the elements in the stream * @param mapper a TriFunction that takes three consecutive elements and produces a new element of type R * @return a new Stream consisting of the results of applying the TriFunction to each triplet of consecutive elements */ @ParallelSupported @IntermediateOp public abstract Stream slidingMap(int increment, TriFunction mapper); /** * Applies a sliding map operation on the stream with a specified increment and a flag to ignore unpaired elements. * It takes an integer as the increment, a boolean to ignore unpaired elements, and a TriFunction as an argument which is applied to each triplet of consecutive elements in the stream. * {@code Nulls} will be passed as the second and third arguments to the TriFunction for last non-paired element(s) in the stream if the {@code ignoreNotPaired} flag is set to {@code false}. * The result is a new stream consisting of the results of applying the TriFunction. * * @param the type of the output stream * @param increment the step size for sliding over the elements in the stream * @param ignoreNotPaired a flag to indicate whether to ignore unpaired elements in the stream * @param mapper a TriFunction that takes three consecutive elements and produces a new element of type R * @return a new Stream consisting of the results of applying the TriFunction to each triplet of consecutive elements */ @ParallelSupported @IntermediateOp public abstract Stream slidingMap(int increment, boolean ignoreNotPaired, TriFunction mapper); /** * Note: copied from StreamEx: StreamEx *
* * Applies a range map operation on the stream. * It takes a BiPredicate and a BiFunction as arguments. The BiPredicate is used to determine if two consecutive elements belong to the same range. * The BiFunction is applied to the borders of each range in the stream. * Returns a stream consisting of results of applying the given function to the ranges created from the source elements. * *
     * <pre>
     * <code>
     * Stream.of("a", "ab", "ac", "b", "c", "cb").rangeMap((a, b) -> b.startsWith(a), (a, b) -> a + "->" + b).toList(); // a->ac, b->b, c->cb
     * </code>
     * </pre>
* * * @param the type of the resulting elements * @param sameRange a non-interfering, stateless predicate to apply to * the leftmost and next elements which returns {@code true} for elements * which belong to the same range. * @param mapper a non-interfering, stateless function to apply to the * range borders and produce the resulting element. If value was * not merged to the interval, then mapper will receive the same * value twice, otherwise it will receive the leftmost and the * rightmost values which were merged to the range. * @return * @see #collapse(BiPredicate, BiFunction) */ @SequentialOnly @IntermediateOp public abstract Stream rangeMap(final BiPredicate sameRange, final BiFunction mapper); /** * Applies a mapping function to the first element of the stream and returns a new stream. * The mapping function takes an element of type T and returns a new element of type T. * This is an intermediate operation. * *
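     * For example, a small sketch:
     * <pre>
     * <code>
     * // only the first element is transformed, the rest are passed through unchanged
     * Stream.of("a", "b", "c").mapFirst(s -> s.toUpperCase()).toList(); // => [A, b, c]
     * </code>
     * </pre>
     *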
* This method will always run sequentially, even in parallel stream. * * @param mapperForFirst the mapping function to apply to the first element * @return a new Stream consisting of the first element transformed by the mapping function and the rest of the elements unchanged */ @SequentialOnly @IntermediateOp public abstract Stream mapFirst(Function mapperForFirst); /** * Applies a mapping function to the first element of the stream and a different mapping function to the rest of the elements, * and returns a new stream consisting of the transformed elements. * This is an intermediate operation. * * @param the type of the output stream * @param mapperForFirst the mapping function to apply to the first element * @param mapperForElse the mapping function to apply to the rest of the elements * @return a new Stream consisting of the first element transformed by the first mapping function and the rest of the elements transformed by the second mapping function */ @ParallelSupported @IntermediateOp public abstract Stream mapFirstOrElse(Function mapperForFirst, Function mapperForElse); /** * Applies a mapping function to the last element of the stream and returns a new stream. * The mapping function takes an element of type T and returns a new element of type T. * This is an intermediate operation. * *
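     * For example, a small sketch:
     * <pre>
     * <code>
     * // only the last element is transformed, the rest are passed through unchanged
     * Stream.of("a", "b", "c").mapLast(s -> s.toUpperCase()).toList(); // => [a, b, C]
     * </code>
     * </pre>
     *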
* This method will always run sequentially, even in parallel stream. * * @param mapperForLast the mapping function to apply to the last element * @return a new Stream consisting of the last element transformed by the mapping function and the rest of the elements unchanged */ @SequentialOnly @IntermediateOp public abstract Stream mapLast(Function mapperForLast); /** * Applies a mapping function to the last element of the stream and a different mapping function to the rest of the elements, * and returns a new stream consisting of the transformed elements. * This is an intermediate operation. * * @param the type of the output stream * @param mapperForLast the mapping function to apply to the last element * @param mapperForElse the mapping function to apply to the rest of the elements * @return a new Stream consisting of the last element transformed by the first mapping function and the rest of the elements transformed by the second mapping function */ @ParallelSupported @IntermediateOp public abstract Stream mapLastOrElse(Function mapperForLast, Function mapperForElse); /** * Transforms this stream to a {@code CharStream} by applying the specified {@code ToCharFunction} to each element. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new CharStream consisting of the results of applying the given function to the elements of this stream. */ @ParallelSupported @IntermediateOp public abstract CharStream mapToChar(ToCharFunction mapper); /** * Transforms this stream to a {@code ByteStream} by applying the specified {@code ToByteFunction} to each element. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new ByteStream consisting of the results of applying the given function to the elements of this stream. */ @ParallelSupported @IntermediateOp public abstract ByteStream mapToByte(ToByteFunction mapper); /** * Transforms this stream to a {@code ShortStream} by applying the specified {@code ToShortFunction} to each element. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new ShortStream consisting of the results of applying the given function to the elements of this stream. */ @ParallelSupported @IntermediateOp public abstract ShortStream mapToShort(ToShortFunction mapper); /** * Transforms this stream to an {@code IntStream} by applying the specified {@code ToIntFunction} to each element. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new IntStream consisting of the results of applying the given function to the elements of this stream. */ @ParallelSupported @IntermediateOp public abstract IntStream mapToInt(ToIntFunction mapper); /** * Transforms this stream to a {@code LongStream} by applying the specified {@code ToLongFunction} to each element. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new LongStream consisting of the results of applying the given function to the elements of this stream. */ @ParallelSupported @IntermediateOp public abstract LongStream mapToLong(ToLongFunction mapper); /** * Transforms this stream to a {@code FloatStream} by applying the specified {@code ToFloatFunction} to each element. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. 
* @return A new FloatStream consisting of the results of applying the given function to the elements of this stream. */ @ParallelSupported @IntermediateOp public abstract FloatStream mapToFloat(ToFloatFunction mapper); /** * Transforms this stream to a {@code DoubleStream} by applying the specified {@code ToDoubleFunction} to each element. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new DoubleStream consisting of the results of applying the given function to the elements of this stream. */ @ParallelSupported @IntermediateOp public abstract DoubleStream mapToDouble(ToDoubleFunction mapper); // public abstract EntryStream mapToEntry(); /** * Transforms the elements in the stream into Map.Entry instances by applying the provided function to each element. * This is an intermediate operation. * * @param The type of the key in the Map.Entry. * @param The type of the value in the Map.Entry. * @param mapper The function to be applied to each element in the stream, which should return a Map.Entry instance. * @return A new EntryStream consisting of Map.Entry instances obtained by applying the mapper function to the elements of this stream. */ @ParallelSupported @IntermediateOp public abstract EntryStream mapToEntry(Function> mapper); /** * Transforms the elements in the stream into Map.Entry instances by applying the provided key and value mapping functions to each element. * This is an intermediate operation. * * @param The type of the key in the Map.Entry. * @param The type of the value in the Map.Entry. * @param keyMapper The function to be applied to each element in the stream to generate the key. * @param valueMapper The function to be applied to each element in the stream to generate the value. * @return A new EntryStream consisting of Map.Entry instances obtained by applying the key and value mapping functions to the elements of this stream. */ @ParallelSupported @IntermediateOp public abstract EntryStream mapToEntry(Function keyMapper, Function valueMapper); // public abstract Stream mapp(Function> mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a Stream of elements. * This is an intermediate operation. * * @param The type of the elements in the returned Stream. * @param mapper The function to be applied to each element in the stream. * @return A new Stream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped stream produced by applying the provided mapping function to each element. */ @ParallelSupported @IntermediateOp public abstract Stream flatMap(Function> mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a Collection of elements. * This is an intermediate operation. * * @param The type of the elements in the returned Collection. * @param mapper The function to be applied to each element in the stream. * @return A new Stream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped collection produced by applying the provided mapping function to each element. */ @ParallelSupported @IntermediateOp public abstract Stream flatmap(Function> mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return an array of elements. * This is an intermediate operation. 
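     * For example, a small sketch contrasting the three related variants described above (each produces the elements a, b, c):
     * <pre>
     * <code>
     * Stream.of("a b", "c").flatMap(s -> Stream.of(s.split(" ")));     // mapper returns a Stream
     * Stream.of("a b", "c").flatmap(s -> Arrays.asList(s.split(" "))); // mapper returns a Collection
     * Stream.of("a b", "c").flattMap(s -> s.split(" "));               // mapper returns an array
     * </code>
     * </pre>
     *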
* * @param The type of the elements in the returned array. * @param mapper The function to be applied to each element in the stream. * @return A new Stream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped array produced by applying the provided mapping function to each element. */ @Beta @ParallelSupported @IntermediateOp public abstract Stream flattMap(Function mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a java.util.stream.Stream of elements. * This is an intermediate operation. * * @param The type of the elements in the returned Stream. * @param mapper The function to be applied to each element in the stream. * @return A new Stream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped stream produced by applying the provided mapping function to each element. */ @Beta @ParallelSupported @IntermediateOp public abstract Stream flattmap(Function> mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a CharStream of elements. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new CharStream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped CharStream produced by applying the provided mapping function to each element. */ @ParallelSupported @IntermediateOp public abstract CharStream flatMapToChar(Function mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a char array of elements. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new CharStream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped char array produced by applying the provided mapping function to each element. */ @ParallelSupported @IntermediateOp public abstract CharStream flatmapToChar(Function mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a ByteStream of elements. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new ByteStream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped ByteStream produced by applying the provided mapping function to each element. */ @ParallelSupported @IntermediateOp public abstract ByteStream flatMapToByte(Function mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a byte array of elements. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new ByteStream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped byte array produced by applying the provided mapping function to each element. */ @ParallelSupported @IntermediateOp public abstract ByteStream flatmapToByte(Function mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a ShortStream of elements. * This is an intermediate operation. 
* * @param mapper The function to be applied to each element in the stream. * @return A new ShortStream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped ShortStream produced by applying the provided mapping function to each element. */ @ParallelSupported @IntermediateOp public abstract ShortStream flatMapToShort(Function mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a short array of elements. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new ShortStream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped short array produced by applying the provided mapping function to each element. */ @ParallelSupported @IntermediateOp public abstract ShortStream flatmapToShort(Function mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return an IntStream of elements. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new IntStream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped IntStream produced by applying the provided mapping function to each element. */ @ParallelSupported @IntermediateOp public abstract IntStream flatMapToInt(Function mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return an array of int elements. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new IntStream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped int array produced by applying the provided mapping function to each element. */ @ParallelSupported @IntermediateOp public abstract IntStream flatmapToInt(Function mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a LongStream of elements. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new LongStream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped LongStream produced by applying the provided mapping function to each element. */ @ParallelSupported @IntermediateOp public abstract LongStream flatMapToLong(Function mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a long array of elements. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new LongStream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped long array produced by applying the provided mapping function to each element. */ @ParallelSupported @IntermediateOp public abstract LongStream flatmapToLong(Function mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a FloatStream of elements. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. 
* @return A new FloatStream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped FloatStream produced by applying the provided mapping function to each element. */ @ParallelSupported @IntermediateOp public abstract FloatStream flatMapToFloat(Function mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a float array of elements. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new FloatStream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped float array produced by applying the provided mapping function to each element. */ @ParallelSupported @IntermediateOp public abstract FloatStream flatmapToFloat(Function mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a DoubleStream of elements. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new DoubleStream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped DoubleStream produced by applying the provided mapping function to each element. */ @ParallelSupported @IntermediateOp public abstract DoubleStream flatMapToDouble(Function mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a double array of elements. * This is an intermediate operation. * * @param mapper The function to be applied to each element in the stream. * @return A new DoubleStream consisting of the elements obtained by replacing each element of this stream * with the contents of a mapped double array produced by applying the provided mapping function to each element. */ @ParallelSupported @IntermediateOp public abstract DoubleStream flatmapToDouble(Function mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a Stream of Map.Entry instances. * This is an intermediate operation. * * @param The type of the key in the Map.Entry. * @param The type of the value in the Map.Entry. * @param mapper The function to be applied to each element in the stream. * @return A new EntryStream consisting of Map.Entry instances obtained by applying the mapper function to the elements of this stream. */ @ParallelSupported @IntermediateOp public abstract EntryStream flatMapToEntry(Function>> mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return a Map of key-value pairs. * This is an intermediate operation. * * @param The type of the key in the Map. * @param The type of the value in the Map. * @param mapper The function to be applied to each element in the stream. * @return A new EntryStream consisting of Map.Entry instances obtained by transforming each element of this stream * into a Map and then flattening these Maps into a stream of their entries. */ @ParallelSupported @IntermediateOp public abstract EntryStream flatmapToEntry(Function> mapper); /** * Transforms the elements in the stream by applying a function to each element. * The function should return an EntryStream of key-value pairs. * This is an intermediate operation. * * @param The type of the key in the EntryStream. * @param The type of the value in the EntryStream. 
* @param mapper The function to be applied to each element in the stream. * @return A new EntryStream consisting of key-value pairs obtained by transforming each element of this stream * into an EntryStream and then flattening these EntryStreams into a single EntryStream. */ @Beta @ParallelSupported @IntermediateOp public abstract EntryStream flattMapToEntry(Function> mapper); //NOSONAR /** * Applies a flat mapping operation on the stream, but only for {@code non-null} elements. * It takes a Function as an argument which is applied to each {@code non-null} element in the stream. * The Function returns a Collection of new elements. Each element in these collections is included in the resulting stream. * * This is an intermediate operation. *
* @implNote it's equivalent to: {@code skipNulls().flatmap(mapper)} * * @param the type of elements in the resulting stream * @param mapper a Function that takes an element and produces a Collection of new elements * @return a new Stream consisting of the results of applying the Function to each {@code non-null} element */ @Beta @ParallelSupported @IntermediateOp public abstract Stream flatmapIfNotNull(final Function> mapper); /** * Applies a flat mapping operation on the stream, but only for {@code non-null} elements. * It takes two Functions as arguments which are applied to each {@code non-null} element in the stream. * The first Function returns a Collection of intermediate elements. * The second Function is then applied to each intermediate element, producing a Collection of new elements. * Each element in these collections is included in the resulting stream. * * This is an intermediate operation. *
* @implNote it's equivalent to: {@code skipNulls().flatmap(mapper).skipNulls().flatmap(mapper2)} * * @param the type of the intermediate elements * @param the type of elements in the resulting stream * @param mapper a Function that takes an element and produces a Collection of intermediate elements * @param mapper2 a Function that takes an intermediate element and produces a Collection of new elements * @return a new Stream consisting of the results of applying the Functions to each {@code non-null} element */ @Beta @ParallelSupported @IntermediateOp public abstract Stream flatmapIfNotNull(final Function> mapper, final Function> mapper2); /** * Applies a mapping operation on the stream with multiple output elements for each input element. * It takes a BiConsumer as an argument which is applied to each element in the stream and a Consumer that accepts multiple output elements. * The result is a new stream consisting of all output elements produced by the BiConsumer for each input element. * * This is an intermediate operation. * * @param the type of the output stream * @param mapper a BiConsumer that takes an input element and a Consumer for output elements, and produces multiple output elements for each input element. * @return a new Stream consisting of all output elements produced by the BiConsumer for each input element. */ @Beta @ParallelSupported @IntermediateOp public abstract Stream mapMulti(BiConsumer> mapper); /** * Applies a mapping operation on the stream with multiple output integers for each input element. * It takes a BiConsumer as an argument which is applied to each element in the stream and an IntConsumer that accepts multiple output integers. * The result is a new IntStream consisting of all output integers produced by the BiConsumer for each input element. * * This is an intermediate operation. * * @param mapper a BiConsumer that takes an input element and an IntConsumer for output integers, and produces multiple output integers for each input element. * @return a new IntStream consisting of all output integers produced by the BiConsumer for each input element. */ @Beta @ParallelSupported @IntermediateOp public abstract IntStream mapMultiToInt(BiConsumer mapper); /** * Applies a mapping operation on the stream with multiple output longs for each input element. * It takes a BiConsumer as an argument which is applied to each element in the stream and a LongConsumer that accepts multiple output longs. * The result is a new LongStream consisting of all output longs produced by the BiConsumer for each input element. * * This is an intermediate operation. * * @param mapper a BiConsumer that takes an input element and a LongConsumer for output longs, and produces multiple output longs for each input element. * @return a new LongStream consisting of all output longs produced by the BiConsumer for each input element. */ @Beta @ParallelSupported @IntermediateOp public abstract LongStream mapMultiToLong(BiConsumer mapper); /** * Applies a mapping operation on the stream with multiple output doubles for each input element. * It takes a BiConsumer as an argument which is applied to each element in the stream and a DoubleConsumer that accepts multiple output doubles. * The result is a new DoubleStream consisting of all output doubles produced by the BiConsumer for each input element. * * This is an intermediate operation. * * @param mapper a BiConsumer that takes an input element and a DoubleConsumer for output doubles, and produces multiple output doubles for each input element. 
* @return a new DoubleStream consisting of all output doubles produced by the BiConsumer for each input element. */ @Beta @ParallelSupported @IntermediateOp public abstract DoubleStream mapMultiToDouble(BiConsumer mapper); /** * Note: copied from StreamEx: StreamEx *
* * Applies a partial mapping operation on the stream. * It takes a Function as an argument which is applied to each element in the stream. * The Function returns an Optional. If the Optional is empty, the element is not included in the resulting stream. * If the Optional contains a value, that value is included in the resulting stream. * * @param the type of the output stream * @param mapper a Function that takes an element and produces an Optional of a new element of type R * @return a new Stream consisting of the results of applying the Function to each element */ @Beta @ParallelSupported @IntermediateOp public abstract Stream mapPartial(Function> mapper); /** * Note: copied from StreamEx: StreamEx *
* * Applies a partial mapping operation on the stream. * It takes a Function as an argument which is applied to each element in the stream. * The Function returns an OptionalInt. If the OptionalInt is empty, the element is not included in the resulting IntStream. * If the OptionalInt contains a value, that value is included in the resulting IntStream. * * @param mapper a Function that takes an element and produces an OptionalInt of a new element * @return a new IntStream consisting of the results of applying the Function to each element */ @Beta @ParallelSupported @IntermediateOp public abstract IntStream mapPartialToInt(Function mapper); /** * Note: copied from StreamEx: StreamEx *
* * Applies a partial mapping operation on the stream. * It takes a Function as an argument which is applied to each element in the stream. * The Function returns an OptionalLong. If the OptionalLong is empty, the element is not included in the resulting LongStream. * If the OptionalLong contains a value, that value is included in the resulting LongStream. * * @param mapper a Function that takes an element and produces an OptionalLong of a new element * @return a new LongStream consisting of the results of applying the Function to each element */ @Beta @ParallelSupported @IntermediateOp public abstract LongStream mapPartialToLong(Function mapper); /** * Note: copied from StreamEx: StreamEx *
* * Applies a partial mapping operation on the stream. * It takes a Function as an argument which is applied to each element in the stream. * The Function returns an OptionalDouble. If the OptionalDouble is empty, the element is not included in the resulting DoubleStream. * If the OptionalDouble contains a value, that value is included in the resulting DoubleStream. * * @param mapper a Function that takes an element and produces an OptionalDouble of a new element * @return a new DoubleStream consisting of the results of applying the Function to each element */ @Beta @ParallelSupported @IntermediateOp public abstract DoubleStream mapPartialToDouble(Function mapper); /** * Note: copied from StreamEx: StreamEx *
* * Applies a partial mapping operation on the stream. * It takes a Function as an argument which is applied to each element in the stream. * The Function returns a java.util.Optional. If the Optional is empty, the element is not included in the resulting stream. * If the Optional contains a value, that value is included in the resulting stream. * * This is an intermediate operation. * * @param the type of the output stream * @param mapper a Function that takes an element and produces a java.util.Optional of a new element of type R * @return a new Stream consisting of the results of applying the Function to each element */ @Beta @ParallelSupported @IntermediateOp public abstract Stream mapPartialJdk(Function> mapper); /** * Note: copied from StreamEx: StreamEx *
* * Applies a partial mapping operation on the stream. * It takes a Function as an argument which is applied to each element in the stream. * The Function returns a java.util.OptionalInt. If the OptionalInt is empty, the element is not included in the resulting IntStream. * If the OptionalInt contains a value, that value is included in the resulting IntStream. * * This is an intermediate operation. * * @param mapper a Function that takes an element and produces a java.util.OptionalInt of a new element * @return a new IntStream consisting of the results of applying the Function to each element */ @Beta @ParallelSupported @IntermediateOp public abstract IntStream mapPartialToIntJdk(Function mapper); /** * Note: copied from StreamEx: StreamEx *
* * Applies a partial mapping operation on the stream. * It takes a Function as an argument which is applied to each element in the stream. * The Function returns a java.util.OptionalLong. If the OptionalLong is empty, the element is not included in the resulting LongStream. * If the OptionalLong contains a value, that value is included in the resulting LongStream. * * This is an intermediate operation. * * @param mapper a Function that takes an element and produces a java.util.OptionalLong of a new element * @return a new LongStream consisting of the results of applying the Function to each element */ @Beta @ParallelSupported @IntermediateOp public abstract LongStream mapPartialToLongJdk(Function mapper); /** * Note: copied from StreamEx: StreamEx *
* * Applies a partial mapping operation on the stream. * It takes a Function as an argument which is applied to each element in the stream. * The Function returns a java.util.OptionalDouble. If the OptionalDouble is empty, the element is not included in the resulting DoubleStream. * If the OptionalDouble contains a value, that value is included in the resulting DoubleStream. * * This is an intermediate operation. * * @param mapper a Function that takes an element and produces a java.util.OptionalDouble of a new element * @return a new DoubleStream consisting of the results of applying the Function to each element */ @Beta @ParallelSupported @IntermediateOp public abstract DoubleStream mapPartialToDoubleJdk(Function mapper); /** * Groups the elements of the stream by the classification function provided. * This is an intermediate operation. * * @param The type of the key in the resulting Map.Entry. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @return A new Stream consisting of Map.Entry instances where the key is the group identifier and the value is a list of elements belonging to that group. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream>> groupBy(final Function keyMapper); /** * Groups the elements of the stream by applying a key mapping function to each element. * The result is a Stream where each entry's key is the group identifier (determined by the key mapping function), * and the value is a list of elements belonging to that group. * This is an intermediate operation. * * @param The type of the key in the resulting Map.Entry. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @return A new Stream consisting of entries where the key is the group identifier and the value is a list of elements belonging to that group. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream>> groupBy(final Function keyMapper, final Supplier>> mapFactory); /** * Groups the elements of the stream by applying a key mapping function and a value mapping function to each element. * The result is a Stream where each entry's key is the group identifier (determined by the key mapping function), * and the value is a list of elements that mapped to the corresponding key by the value mapping function. * This is an intermediate operation. * * @param The type of the key in the resulting Map.Entry. * @param The type of the value in the resulting Map.Entry. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param valueMapper The function to be applied to each element in the stream to determine its value in the group. * @return A new Stream consisting of entries where the key is the group identifier and the value is a list of elements that mapped to the corresponding key. * @see Collectors#toMultimap(Function, Function) */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream>> groupBy(Function keyMapper, Function valueMapper); /** * Groups the elements of the stream by applying a key mapping function and a value mapping function to each element. * The result is a Stream where each entry's key is the group identifier (determined by the key mapping function), * and the value is a list of elements that mapped to the corresponding key by the value mapping function. * This is an intermediate operation. 
* * @param The type of the key in the resulting Map.Entry. * @param The type of the value in the resulting Map.Entry. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param valueMapper The function to be applied to each element in the stream to determine its value in the group. * @param mapFactory The supplier providing a new empty Map into which the results will be inserted. * @return A new Stream consisting of entries where the key is the group identifier and the value is a list of elements that mapped to the corresponding key. * @see Collectors#toMultimap(Function, Function, Supplier) */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream>> groupBy(Function keyMapper, Function valueMapper, Supplier>> mapFactory); /** * Groups the elements of the stream by the classification function provided and then applies a Collector to the elements of each group. * This is an intermediate operation. * * @param The type of the key in the resulting Map.Entry. * @param The result type of the Collector. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param downstream The Collector to be applied to the elements of each group. * @return A new Stream consisting of Map.Entry instances where the key is the group identifier and the value is the result of applying the Collector to the elements of the group. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream> groupBy(Function keyMapper, Collector downstream); /** * Groups the elements of the stream by the classification function provided and then applies a Collector to the elements of each group. * This is an intermediate operation. * * @param The type of the key in the resulting Map.Entry. * @param The result type of the Collector. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param downstream The Collector to be applied to the elements of each group. * @param mapFactory The supplier providing a new empty Map into which the results will be inserted. * @return A new Stream consisting of Map.Entry instances where the key is the group identifier and the value is the result of applying the Collector to the elements of the group. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream> groupBy(Function keyMapper, Collector downstream, Supplier> mapFactory); /** * Groups the elements of the stream by applying a key mapping function and a value mapping function to each element, * and then applies a Collector to the values of each group. * This is an intermediate operation. * * @param The type of the key in the resulting Map.Entry. * @param The type of the value to be collected in the resulting Map.Entry. * @param The result type of the Collector. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param valueMapper The function to be applied to each element in the stream to determine its value in the group. * @param downstream The Collector to be applied to the values of each group. * @return A new Stream consisting of Map.Entry instances where the key is the group identifier and the value is the result of applying the Collector to the values of the group. 
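     *
     * For illustration only (a hedged sketch; the input values and the summed result are assumptions):
     * {@code Stream.of("apple", "banana", "avocado").groupBy(s -> s.charAt(0), String::length, Collectors.summingInt(Fn.unboxI())) ==> [a=12, b=6]}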
*/ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream> groupBy(Function keyMapper, Function valueMapper, Collector downstream); /** * Groups the elements of the stream by applying a key mapping function and a value mapping function to each element, * and then applies a Collector to the values of each group. The result is a Stream where each entry's key is the group identifier * (determined by the key mapping function), and the value is the result of applying the Collector to the values of the group. * This is an intermediate operation. * * @param The type of the key in the resulting Map.Entry. * @param The type of the value to be collected in the resulting Map.Entry. * @param The result type of the Collector. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param valueMapper The function to be applied to each element in the stream to determine its value in the group. * @param downstream The Collector to be applied to the values of each group. * @param mapFactory The supplier providing a new empty Map into which the results will be inserted. * @return A new Stream consisting of Map.Entry instances where the key is the group identifier and the value is the result of applying the Collector to the values of the group. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream> groupBy(Function keyMapper, Function valueMapper, Collector downstream, Supplier> mapFactory); /** * Groups the elements of the stream by applying a key mapping function and a value mapping function to each element, * and then merges the values of each group using a merge function. * This is an intermediate operation. * * @param The type of the key in the resulting Map.Entry. * @param The type of the value in the resulting Map.Entry. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param valueMapper The function to be applied to each element in the stream to determine its value in the group. * @param mergeFunction The function to be applied for merging the values of each group. * @return A new Stream consisting of Map.Entry instances where the key is the group identifier and the value is the result of merging the values of the group using the merge function. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream> groupBy(Function keyMapper, final Function valueMapper, BinaryOperator mergeFunction); /** * Groups the elements of the stream by applying a key mapping function and a value mapping function to each element. * Then merges the values using the provided merge function. * This is an intermediate operation. * * @param The type of the key in the resulting Map.Entry. * @param The type of the value in the resulting Map.Entry. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param valueMapper The function to be applied to each element in the stream to determine its value in the group. * @param mergeFunction The function to be used for merging values in case of key collision. * @param mapFactory The supplier providing a new empty Map into which the results will be inserted. * @return A new Stream consisting of Map.Entry instances where the key is the group identifier and the value is the result of applying the value mapping function to the elements of the group and merging them using the merge function. 
*/ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream> groupBy(Function keyMapper, Function valueMapper, BinaryOperator mergeFunction, Supplier> mapFactory); // @ParallelSupported // public abstract Stream>> flatGroupBy(final Throwables.Function, E> flatKeyExtractor); // // @ParallelSupported // public abstract Stream>> flatGroupBy(final Throwables.Function, E> flatKeyExtractor, // final Supplier>> mapFactory); // // /** // * // * @param flatKeyExtractor // * @param valueMapper // * @return // * @see Collectors#toMultimap(Function, Function) // */ // @ParallelSupported // public abstract Stream>> flatGroupBy(Throwables.Function, E> flatKeyExtractor, // Throwables.BiFunction valueMapper); // // /** // * // * @param flatKeyExtractor // * @param valueMapper // * @param mapFactory // * @return // * @see Collectors#toMultimap(Function, Function, Supplier) // */ // @ParallelSupported // public abstract Stream>> flatGroupBy(Throwables.Function, E> flatKeyExtractor, // Throwables.BiFunction valueMapper, Supplier>> mapFactory); // // @ParallelSupported // public abstract Stream> flatGroupBy(final Throwables.Function, E> flatKeyExtractor, // final Collector downstream); // // @ParallelSupported // public abstract Stream> flatGroupBy(final Throwables.Function, E> flatKeyExtractor, // final Collector downstream, final Supplier> mapFactory); // // @ParallelSupported // public abstract Stream> flatGroupBy(final Throwables.Function, E> flatKeyExtractor, // final Throwables.BiFunction valueMapper, final Collector downstream); // // @ParallelSupported // public abstract Stream> flatGroupBy(final Throwables.Function, E> flatKeyExtractor, // final Throwables.BiFunction valueMapper, final Collector downstream, // final Supplier> mapFactory); // // @ParallelSupported // public abstract Stream> flatGroupBy(final Throwables.Function, E> flatKeyExtractor, // final Throwables.BiFunction valueMapper, BinaryOperator mergeFunction); // // @ParallelSupported // public abstract Stream> flatGroupBy(final Throwables.Function, E> flatKeyExtractor, // final Throwables.BiFunction valueMapper, final BinaryOperator mergeFunction, // final Supplier> mapFactory); // // @ParallelSupported // public abstract Stream>> flattGroupBy(final Throwables.Function, E> flatKeyExtractor); // // @ParallelSupported // public abstract Stream>> flattGroupBy(final Throwables.Function, E> flatKeyExtractor, // final Supplier>> mapFactory); // // /** // * // * @param flatKeyExtractor // * @param valueMapper // * @return // * @see Collectors#toMultimap(Function, Function) // */ // @ParallelSupported // public abstract Stream>> flattGroupBy(Throwables.Function, E> flatKeyExtractor, // Throwables.BiFunction valueMapper); // // /** // * // * @param flatKeyExtractor // * @param valueMapper // * @param mapFactory // * @return // * @see Collectors#toMultimap(Function, Function, Supplier) // */ // @ParallelSupported // public abstract Stream>> flattGroupBy(Throwables.Function, E> flatKeyExtractor, // Throwables.BiFunction valueMapper, Supplier>> mapFactory); // // @ParallelSupported // public abstract Stream> flattGroupBy(final Throwables.Function, E> flatKeyExtractor, // final Collector downstream); // // @ParallelSupported // public abstract Stream> flattGroupBy(final Throwables.Function, E> flatKeyExtractor, // final Collector downstream, final Supplier> mapFactory); // // @ParallelSupported // public abstract Stream> flattGroupBy(final Throwables.Function, E> flatKeyExtractor, // final Throwables.BiFunction valueMapper, 
final Collector downstream); // // @ParallelSupported // public abstract Stream> flattGroupBy(final Throwables.Function, E> flatKeyExtractor, // final Throwables.BiFunction valueMapper, final Collector downstream, // final Supplier> mapFactory); // // @ParallelSupported // public abstract Stream> flattGroupBy(final Throwables.Function, E> flatKeyExtractor, // final Throwables.BiFunction valueMapper, BinaryOperator mergeFunction); // // @ParallelSupported // public abstract Stream> flattGroupBy(final Throwables.Function, E> flatKeyExtractor, // final Throwables.BiFunction valueMapper, final BinaryOperator mergeFunction, // final Supplier> mapFactory); /** * Groups the elements of the stream by applying a key mapping function to each element. * The result is an EntryStream where each entry's key is the group identifier (determined by the key mapping function), * and the value is a list of elements belonging to that group. * This is an intermediate operation. * * @param The type of the key in the resulting EntryStream. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @return A new EntryStream consisting of entries where the key is the group identifier and the value is a list of elements belonging to that group. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract EntryStream> groupByToEntry(Function keyMapper); /** * Groups the elements of the stream by applying a key mapping function to each element. * The result is an EntryStream where each entry's key is the group identifier (determined by the key mapping function), * and the value is a list of elements belonging to that group. * This is an intermediate operation. * * @param The type of the key in the resulting EntryStream. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param mapFactory The supplier providing a new empty Map into which the results will be inserted. * @return A new EntryStream consisting of entries where the key is the group identifier and the value is a list of elements belonging to that group. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract EntryStream> groupByToEntry(Function keyMapper, Supplier>> mapFactory); /** * Groups the elements of the stream by applying a key mapping function and a value mapping function to each element. * The result is an EntryStream where each entry's key is the group identifier (determined by the key mapping function), * and the value is a list of elements that were mapped to that key. * This is an intermediate operation. * * @param The type of the key in the resulting EntryStream. * @param The type of the value in the resulting EntryStream. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param valueMapper The function to be applied to each element in the stream to determine its value in the group. * @return A new EntryStream consisting of entries where the key is the group identifier and the value is a list of elements that were mapped to that key. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract EntryStream> groupByToEntry(Function keyMapper, Function valueMapper); /** * Groups the elements of the stream by applying a key mapping function and a value mapping function to each element. 
* The result is an EntryStream where each entry's key is the group identifier (determined by the key mapping function), * and the value is a list of elements that were mapped to that key. * This is an intermediate operation. * * @param The type of the key in the resulting EntryStream. * @param The type of the value in the resulting EntryStream. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param valueMapper The function to be applied to each element in the stream to determine its value in the group. * @param mapFactory The supplier providing a new empty Map into which the results will be inserted. * @return A new EntryStream consisting of entries where the key is the group identifier and the value is a list of elements that were mapped to that key. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract EntryStream> groupByToEntry(Function keyMapper, Function valueMapper, Supplier>> mapFactory); /** * Groups the elements of the stream by applying a key mapping function to each element. * The result is an EntryStream where each entry's key is the group identifier (determined by the key mapping function), * and the value is the result of applying a Collector to the elements of the group. * This is an intermediate operation. * * @param The type of the key in the resulting EntryStream. * @param The type of the value in the resulting EntryStream. The type is determined by the result type of the Collector. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param downstream The Collector to be applied to the elements of each group. * @return A new EntryStream consisting of entries where the key is the group identifier and the value is the result of applying the Collector to the elements of the group. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract EntryStream groupByToEntry(Function keyMapper, Collector downstream); /** * Groups the elements of the stream by applying a key mapping function to each element. * The result is an EntryStream where each entry's key is the group identifier (determined by the key mapping function), * and the value is the result of applying a Collector to the elements of the group. * This is an intermediate operation. * * @param The type of the key in the resulting EntryStream. * @param The type of the value in the resulting EntryStream. The type is determined by the result type of the Collector. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param downstream The Collector to be applied to the elements of each group. * @param mapFactory The supplier providing a new empty Map into which the results will be inserted. * @return A new EntryStream consisting of entries where the key is the group identifier and the value is the result of applying the Collector to the elements of the group. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract EntryStream groupByToEntry(Function keyMapper, Collector downstream, Supplier> mapFactory); /** * Groups the elements of the stream by applying a key mapping function, a value mapping function, and a Collector to each element. * The result is an EntryStream where each entry's key is the group identifier (determined by the key mapping function), * and the value is the result of applying the Collector to the values of the group. * This is an intermediate operation. 
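     *
     * A hedged usage sketch (the input values and expected entries are illustrative assumptions; entry order
     * depends on the backing Map implementation):
     * <pre>
     * <code>
     * Stream.of(1, 2, 2, 3, 3, 3).groupByToEntry(Fn.identity(), Fn.identity(), Collectors.counting()) => [1=1, 2=2, 3=3]
     * </code>
     * </pre>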
* * @param The type of the key in the resulting EntryStream. * @param The type of the intermediate values used in the Collector. * @param The type of the value in the resulting EntryStream. The type is determined by the result type of the Collector. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param valueMapper The function to be applied to each element in the stream to determine its value for the Collector. * @param downstream The Collector to be applied to the values of each group. * @return A new EntryStream consisting of entries where the key is the group identifier and the value is the result of applying the Collector to the values of the group. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract EntryStream groupByToEntry(Function keyMapper, Function valueMapper, Collector downstream); /** * Groups the elements of the stream by applying a key mapping function, a value mapping function, and a Collector to each element. * The result is an EntryStream where each entry's key is the group identifier (determined by the key mapping function), * and the value is the result of applying the Collector to the values of the group. * This is an intermediate operation. * * @param The type of the key in the resulting EntryStream. * @param The type of the intermediate values used in the Collector. * @param The type of the value in the resulting EntryStream. The type is determined by the result type of the Collector. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param valueMapper The function to be applied to each element in the stream to determine its value for the Collector. * @param downstream The Collector to be applied to the values of each group. * @param mapFactory The supplier providing a new empty Map into which the results will be inserted. * @return A new EntryStream consisting of entries where the key is the group identifier and the value is the result of applying the Collector to the values of the group. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract EntryStream groupByToEntry(Function keyMapper, Function valueMapper, Collector downstream, Supplier> mapFactory); /** * Groups the elements of the stream by applying a key mapping function and a value mapping function to each element. * The result is an EntryStream where each entry's key is the group identifier (determined by the key mapping function), * and the value is the result of applying a BinaryOperator to the values of the group. * This is an intermediate operation. * * @param The type of the key in the resulting EntryStream. * @param The type of the value in the resulting EntryStream. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param valueMapper The function to be applied to each element in the stream to determine its value for the BinaryOperator. * @param mergeFunction The BinaryOperator to be applied to the values of each group. * @return A new EntryStream consisting of entries where the key is the group identifier and the value is the result of applying the BinaryOperator to the values of the group. 
*/ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract EntryStream groupByToEntry(Function keyMapper, Function valueMapper, BinaryOperator mergeFunction); /** * Groups the elements of the stream by applying a key mapping function and a value mapping function to each element. * Then merges the values using the provided merge function. * This is an intermediate operation. * * @param The type of the key in the resulting EntryStream. * @param The type of the value in the resulting EntryStream. * @param keyMapper The function to be applied to each element in the stream to determine the group it belongs to. * @param valueMapper The function to be applied to each element in the stream to determine its value in the group. * @param mergeFunction The function to be used for merging values in case of key collision. * @param mapFactory The supplier providing a new empty Map into which the results will be inserted. * @return A new EntryStream consisting of entries where the key is the group identifier and the value is the result of applying the value mapping function to the elements of the group and merging them using the merge function. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract EntryStream groupByToEntry(Function keyMapper, Function valueMapper, BinaryOperator mergeFunction, Supplier> mapFactory); /** * Partitions the elements of the stream according to a Predicate. * The result is a Stream of Map.Entry where the key is a Boolean and the value is a List of elements. * If the Predicate returns {@code true} for an element, it is included in the List corresponding to the key {@code true} in the Map.Entry. * If the Predicate returns {@code false} for an element, it is included in the List corresponding to the key {@code false} in the Map.Entry. * The returned {@code Stream} always contains mappings for both {@code false} and {@code true} keys, even this stream is empty. * This is an intermediate operation and can be processed in parallel. * * @param predicate The Predicate to be used for partitioning the stream elements. * @return A new Stream consisting of Map.Entry where the key is a Boolean and the value is a List of elements. * @see Collectors#partitioningBy(Predicate) */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream>> partitionBy(final Predicate predicate); /** * Partitions the elements of the stream according to a Predicate and a Collector. * The result is a Stream of Map.Entry where the key is a Boolean and the value is the result of applying the Collector to the elements of the group. * If the Predicate returns {@code true} for an element, it is included in the group corresponding to the key {@code true} in the Map.Entry. * If the Predicate returns {@code false} for an element, it is included in the group corresponding to the key {@code false} in the Map.Entry. * The returned {@code Stream} always contains mappings for both {@code false} and {@code true} keys, even this stream is empty. * This is an intermediate operation and can be processed in parallel. * * @param The type of the value in the resulting EntryStream. The type is determined by the result type of the Collector. * @param predicate The Predicate to be used for partitioning the stream elements. * @param downstream The Collector to be applied to the elements of each group. * @return A new Stream consisting of Map.Entry where the key is a Boolean and the value is the result of applying the Collector to the elements of the group. 
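     *
     * For illustration only (a hedged sketch; the input values and counts are assumptions):
     * {@code Stream.of(1, 2, 3, 4, 5).partitionBy(i -> i % 2 == 0, Collectors.counting()) ==> [false=3, true=2]}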
* @see Collectors#partitioningBy(Predicate, Collector) */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream> partitionBy(final Predicate predicate, final Collector downstream); /** * Partitions the elements of the stream according to a Predicate. * The result is an EntryStream where the key is a Boolean and the value is a List of elements. * If the Predicate returns {@code true} for an element, it is included in the List corresponding to the key {@code true} in the EntryStream. * If the Predicate returns {@code false} for an element, it is included in the List corresponding to the key {@code false} in the EntryStream. * The returned {@code Stream} always contains mappings for both {@code false} and {@code true} keys, even this stream is empty. * This is an intermediate operation and can be processed in parallel. * * @param predicate The Predicate to be used for partitioning the stream elements. * @return A new EntryStream consisting of entries where the key is a Boolean and the value is a List of elements. * @see Collectors#partitioningBy(Predicate) */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract EntryStream> partitionByToEntry(final Predicate predicate); /** * Partitions the elements of the stream according to a Predicate and a Collector. * The result is an EntryStream where the key is a Boolean and the value is the result of applying the Collector to the elements of the group. * If the Predicate returns {@code true} for an element, it is included in the group corresponding to the key {@code true} in the EntryStream. * If the Predicate returns {@code false} for an element, it is included in the group corresponding to the key {@code false} in the EntryStream. * The returned {@code Stream} always contains mappings for both {@code false} and {@code true} keys, even this stream is empty. * This is an intermediate operation and can be processed in parallel. * * @param The type of the value in the resulting EntryStream. The type is determined by the result type of the Collector. * @param predicate The Predicate to be used for partitioning the stream elements. * @param downstream The Collector to be applied to the elements of each group. * @return A new EntryStream consisting of entries where the key is a Boolean and the value is the result of applying the Collector to the elements of the group. * @see Collectors#partitioningBy(Predicate, Collector) */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract EntryStream partitionByToEntry(final Predicate predicate, final Collector downstream); /** * Counts the elements of the stream according to a Function that maps elements to keys. * The result is a Stream of Map.Entry where the key is the mapped key and the value is the count of elements that were mapped to this key. * This is an intermediate operation and can be processed in parallel. * * @param The type of the key in the resulting Stream. The type is determined by the result type of the Function. * @param keyMapper The Function to be used for mapping stream elements to keys. * @return A new Stream consisting of Map.Entry where the key is the mapped key and the value is the count of elements that were mapped to this key. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public Stream> countBy(final Function keyMapper) { return groupBy(keyMapper, Collectors.countingToInt()); } /** * Counts the elements of the stream according to a Function that maps elements to keys. 
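     *
     * As a hedged illustration (the sample words and counts are assumptions, not taken from this source):
     * <pre>
     * <code>
     * Stream.of("a", "ab", "abc", "b").countBy(String::length) => [1=2, 2=1, 3=1]
     * </code>
     * </pre>
     *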
* The result is a Stream of Map.Entry where the key is the mapped key and the value is the count of elements that were mapped to this key. * This is an intermediate operation and can be processed in parallel. * * @param The type of the key in the resulting Stream. The type is determined by the result type of the Function. * @param keyMapper The Function to be used for mapping stream elements to keys. * @param mapFactory The supplier providing a new empty Map into which the results will be inserted. * @return A new Stream consisting of Map.Entry where the key is the mapped key and the value is the count of elements that were mapped to this key. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public Stream> countBy(final Function keyMapper, final Supplier> mapFactory) { return groupBy(keyMapper, Collectors.countingToInt(), mapFactory); } /** * Counts the elements of the stream according to a Function that maps elements to keys. * The result is an EntryStream where the key is the mapped key and the value is the count of elements that were mapped to this key. * This is an intermediate operation and can be processed in parallel. * * @param The type of the key in the resulting EntryStream. The type is determined by the result type of the Function. * @param keyMapper The Function to be used for mapping stream elements to keys. * @return A new EntryStream consisting of entries where the key is the mapped key and the value is the count of elements that were mapped to this key. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public EntryStream countByToEntry(final Function keyMapper) { return groupByToEntry(keyMapper, Collectors.countingToInt()); } /** * Counts the elements of the stream according to a Function that maps elements to keys. * The result is an EntryStream where the key is the mapped key and the value is the count of elements that were mapped to this key. * This is an intermediate operation and can be processed in parallel. * * @param The type of the key in the resulting EntryStream. The type is determined by the result type of the Function. * @param keyMapper The Function to be used for mapping stream elements to keys. * @param mapFactory The supplier providing a new empty Map into which the results will be inserted. * @return A new EntryStream consisting of entries where the key is the mapped key and the value is the count of elements that were mapped to this key. */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public EntryStream countByToEntry(final Function keyMapper, final Supplier> mapFactory) { return groupByToEntry(keyMapper, Collectors.countingToInt(), mapFactory); } /** * Merges a series of adjacent elements in the stream which satisfy the given predicate into a List. * The predicate takes two parameters: the previous element and the current element in the stream. * If the predicate returns {@code true}, the current element and its previous element are considered as a series of adjacent elements. * These elements are then collapsed into a List. * * This is an intermediate operation, meaning it's always lazy. It doesn't start processing the data until a terminal operation is invoked on the stream pipeline. * It's also a stateful operation since it needs to remember the previous element when processing the current element. * * This operation is not parallelizable and requires the stream to be ordered. * * @param collapsible a BiPredicate that takes two parameters: the previous element and the current element in the stream. 
* @return a new Stream where each element is a List of adjacent elements which satisfy the given predicate. */ @SequentialOnly @IntermediateOp public abstract Stream> collapse(final BiPredicate collapsible); /** * Merges a series of adjacent elements in the stream which satisfy the given predicate into a Collection. * The predicate takes two parameters: the previous element and the current element in the stream. * If the predicate returns {@code true}, the current element and its previous element are considered as a series of adjacent elements. * These elements are then collapsed into a Collection. * * This is an intermediate operation, meaning it's always lazy. It doesn't start processing the data until a terminal operation is invoked on the stream pipeline. * It's also a stateful operation since it needs to remember the previous element when processing the current element. * * This operation is not parallelizable and requires the stream to be ordered. * * @param the type of the Collection into which the adjacent elements will be collapsed. * @param collapsible a BiPredicate that takes two parameters: the previous element and the current element in the stream. * @param supplier a Supplier that generates the Collection into which the adjacent elements will be collapsed. * @return a new Stream where each element is a Collection of adjacent elements which satisfy the given predicate. */ @SequentialOnly @IntermediateOp public abstract > Stream collapse(final BiPredicate collapsible, Supplier supplier); /** * Merges a series of adjacent elements in the stream which satisfy the given predicate using the merger function and returns a new stream. * The predicate takes two parameters: the previous element and the current element in the stream. * If the predicate returns {@code true}, the current element and its previous element are considered as a series of adjacent elements. * These elements are then merged using the provided BiFunction. * * This is an intermediate operation, meaning it's always lazy. It doesn't start processing the data until a terminal operation is invoked on the stream pipeline. * It's also a stateful operation since it needs to remember the previous element when processing the current element. * * This operation is not parallelizable and requires the stream to be ordered. * *

     * Example:
     * <pre>
     * <code>
     * Stream.of(new Integer[0]).collapse((p, c) -> p < c, (r, c) -> r + c) => []
     * Stream.of(1).collapse((p, c) -> p < c, (r, c) -> r + c) => [1]
     * Stream.of(1, 2).collapse((p, c) -> p < c, (r, c) -> r + c) => [3]
     * Stream.of(1, 2, 3).collapse((p, c) -> p < c, (r, c) -> r + c) => [6]
     * Stream.of(1, 2, 3, 3, 2, 1).collapse((p, c) -> p < c, (r, c) -> r + c) => [6, 3, 2, 1]
     * </code>
     * </pre>
* * @param collapsible a BiPredicate that takes two parameters: the previous element and the current element in the stream. * @param mergeFunction a BiFunction that takes two parameters: the result of the previous merge operation (or the first element if no merge has been performed yet) and the current element, and returns the result of the merge operation. * @return a new Stream where each element is the result of merging adjacent elements which satisfy the given predicate. */ @SequentialOnly @IntermediateOp public abstract Stream collapse(final BiPredicate collapsible, final BiFunction mergeFunction); /** * Merges a series of adjacent elements in the stream which satisfy the given predicate into a single element using the provided operation and returns a new stream. * The predicate takes two parameters: the previous element and the current element in the stream. * If the predicate returns {@code true}, the current element and its previous element are considered as a series of adjacent elements. * These elements are then merged using the provided BiFunction. * * This is an intermediate operation, meaning it's always lazy. It doesn't start processing the data until a terminal operation is invoked on the stream pipeline. * It's also a stateful operation since it needs to remember the previous element when processing the current element. * * This operation is not parallelizable and requires the stream to be ordered. * @param the type of the resulting elements. * @param collapsible a BiPredicate that takes two parameters: the previous element and the current element in the stream. * @param init the initial value to be used in the BiFunction for the first element if the predicate returns {@code true}. * @param mergeFunction a BiFunction that takes two parameters: the initial value or the result of the merge function from the previous step, and the current element. It returns a single element that represents the collapsed elements. * @return a new Stream where each element is the result of merging adjacent elements which satisfy the given predicate. */ @SequentialOnly @IntermediateOp public abstract Stream collapse(final BiPredicate collapsible, final U init, final BiFunction mergeFunction); // /** // * // * @param // * @param collapsible test the current element with its previous element. The first parameter is the previous element of current element, the second parameter is the current element. // * @param supplier // * @param accumulator // * @return // * @deprecated use {@linkplain #collapse(BiPredicate, Collector)} instead. 1, parameter position is inconsistent? {@code supplier} should be last parameter, 2 Too many overload methods? 3, not frequently used? // */ // @Deprecated // @SequentialOnly // @IntermediateOp // public abstract Stream collapse(final BiPredicate collapsible, final Supplier supplier, // final BiConsumer accumulator); /** * Merges a series of adjacent elements in the stream which satisfy the given predicate into a single element using the provided collector and returns a new stream. * The predicate takes two parameters: the previous element and the current element in the stream. * If the predicate returns {@code true}, the current element and its previous element are considered as a series of adjacent elements. * These elements are then collapsed into a single element using the provided collector. * * This is an intermediate operation, meaning it's always lazy. It doesn't start processing the data until a terminal operation is invoked on the stream pipeline. 
* It's also a stateful operation since it needs to remember the previous element when processing the current element. * * This operation is not parallelizable and requires the stream to be ordered. * *

     * Example:
     * <pre>
     * <code>
     * Stream.of(new Integer[0]).collapse((p, c) -> p < c, Collectors.summingInt(Fn.unboxI())) => []
     * Stream.of(1).collapse((p, c) -> p < c, Collectors.summingInt(Fn.unboxI())) => [1]
     * Stream.of(1, 2).collapse((p, c) -> p < c, Collectors.summingInt(Fn.unboxI())) => [3]
     * Stream.of(1, 2, 3).collapse((p, c) -> p < c, Collectors.summingInt(Fn.unboxI())) => [6]
     * Stream.of(1, 2, 3, 3, 2, 1).collapse((p, c) -> p < c, Collectors.summingInt(Fn.unboxI())) => [6, 3, 2, 1]
     * </code>
     * </pre>
* * @param collapsible a BiPredicate that takes two parameters: the previous element and the current element in the stream. * @param collector a Collector that collects the adjacent elements into a single element. * @return a new Stream where each element is the result of collapsing adjacent elements which satisfy the given predicate. */ @SequentialOnly @IntermediateOp public abstract Stream collapse(final BiPredicate collapsible, final Collector collector); /** * Merges a series of adjacent elements in the stream which satisfy the given predicate into a single list and returns a new stream. * The predicate takes three parameters: the first element of the series, the previous element and the current element in the stream. * If the predicate returns {@code true}, the current element, its previous element and the first element of the series are considered as a series of adjacent elements. * These elements are then collapsed into a single list. * * This is an intermediate operation, meaning it's always lazy. It doesn't start processing the data until a terminal operation is invoked on the stream pipeline. * It's also a stateful operation since it needs to remember the first and previous elements when processing the current element. * * This operation is not parallelizable and requires the stream to be ordered. * * @param collapsible a TriPredicate that takes three parameters: the first element of the series, the previous element and the current element in the stream. * @return a new Stream where each element is a list that is the result of collapsing adjacent elements which satisfy the given predicate. */ @Beta @SequentialOnly @IntermediateOp public abstract Stream> collapse(final TriPredicate collapsible); /** * Merges a series of adjacent elements in the stream which satisfy the given predicate into a single collection and returns a new stream. * The predicate takes three parameters: the first element of the series, the previous element and the current element in the stream. * If the predicate returns {@code true}, the current element, its previous element and the first element of the series are considered as a series of adjacent elements. * These elements are then collapsed into a single collection. * * This is an intermediate operation, meaning it's always lazy. It doesn't start processing the data until a terminal operation is invoked on the stream pipeline. * It's also a stateful operation since it needs to remember the first and previous elements when processing the current element. * * This operation is not parallelizable and requires the stream to be ordered. * * @param the type of the Collection into which the adjacent elements will be collapsed * @param collapsible a TriPredicate that takes three parameters: the first element of the series, the previous element and the current element in the stream. * @param supplier a Supplier that generates the collection into which the adjacent elements will be collapsed. * @return a new Stream where each element is a collection that is the result of collapsing adjacent elements which satisfy the given predicate. */ @Beta @SequentialOnly @IntermediateOp public abstract > Stream collapse(final TriPredicate collapsible, Supplier supplier); /** * Merges a series of adjacent elements in the stream which satisfy the given predicate into a single element and returns a new stream. * The predicate takes three parameters: the first element of the series, the previous element and the current element in the stream. 
* If the predicate returns {@code true}, the current element, its previous element and the first element of the series are considered as a series of adjacent elements. * These elements are then collapsed into a single element using the provided merge function. * * This is an intermediate operation, meaning it's always lazy. It doesn't start processing the data until a terminal operation is invoked on the stream pipeline. * It's also a stateful operation since it needs to remember the first and previous elements when processing the current element. * * This operation is not parallelizable and requires the stream to be ordered. * *

     * Example:
     * <pre>
     * <code>
     * Stream.of(new Integer[0]).collapse((f, p, c) -> f < c, (r, c) -> r + c) => []
     * Stream.of(1).collapse((f, p, c) -> f < c, (r, c) -> r + c) => [1]
     * Stream.of(1, 2).collapse((f, p, c) -> f < c, (r, c) -> r + c) => [3]
     * Stream.of(1, 2, 3).collapse((f, p, c) -> f < c, (r, c) -> r + c) => [6]
     * Stream.of(1, 2, 3, 3, 2, 1).collapse((f, p, c) -> f < c, (r, c) -> r + c) => [11, 1]
     * </code>
     * </pre>
* * @param collapsible a TriPredicate that takes three parameters: the first element of the series, the previous element and the current element in the stream. * @param mergeFunction a BiFunction that takes two parameters: the current element and its previous element. It returns a single element that represents the collapsed elements. * @return a new Stream where each element is the result of collapsing adjacent elements which satisfy the given predicate. */ @Beta @SequentialOnly @IntermediateOp public abstract Stream collapse(final TriPredicate collapsible, final BiFunction mergeFunction); /** * Merges a series of adjacent elements in the stream which satisfy the given predicate into a single element and returns a new stream. * The predicate takes three parameters: the first element of the series, the previous element and the current element in the stream. * If the predicate returns {@code true}, the current element, its previous element and the first element of the series are considered as a series of adjacent elements. * These elements are then collapsed into a single element using the provided merge function. * * This is an intermediate operation, meaning it's always lazy. It doesn't start processing the data until a terminal operation is invoked on the stream pipeline. * It's also a stateful operation since it needs to remember the first and previous elements when processing the current element. * * This operation is not parallelizable and requires the stream to be ordered. * * @param collapsible a TriPredicate that takes three parameters: the first element of the series, the previous element and the current element in the stream. * @param init the initial value to be used in the merge function for the first element in the series. * @param mergeFunction a BiFunction that takes two parameters: the initial value or the result of the merge function from the previous step, and the current element. It returns a single element that represents the collapsed elements. * @return a new Stream where each element is the result of collapsing adjacent elements which satisfy the given predicate. */ @Beta @SequentialOnly @IntermediateOp public abstract Stream collapse(final TriPredicate collapsible, final U init, final BiFunction mergeFunction); // /** // * // * @param // * @param collapsible test the current element with the first element and previous element in the series. The first parameter is the first element of this series, the second parameter is the previous element and the third parameter is the current element. // * @param supplier // * @param accumulator // * @return // * @deprecated use {@linkplain #collapse(TriPredicate, Collector)} instead. 1, parameter position is inconsistent? {@code supplier} should be last parameter, 2 Too many overload methods? 3, not frequently used? // */ // @Deprecated // @Beta // @SequentialOnly // @IntermediateOp // public abstract Stream collapse(final TriPredicate collapsible, final Supplier supplier, // final BiConsumer accumulator); /** * Merges a series of adjacent elements in the stream which satisfy the given predicate into a single element and returns a new stream. * The predicate takes three parameters: the first element of the series, the previous element and the current element in the stream. * If the predicate returns {@code true}, the current element, its previous element and the first element of the series are considered as a series of adjacent elements. * These elements are then collapsed into a single element using the provided collector. 
* * This is an intermediate operation, meaning it's always lazy. It doesn't start processing the data until a terminal operation is invoked on the stream pipeline. * It's also a stateful operation since it needs to remember the first and previous elements when processing the current element. * * This operation is not parallelizable and requires the stream to be ordered. * *

     * Example:
     * <pre>
     * <code>
     * Stream.of(new Integer[0]).collapse((f, p, c) -> f < c, Collectors.summingInt(Fn.unboxI())) => []
     * Stream.of(1).collapse((f, p, c) -> f < c, Collectors.summingInt(Fn.unboxI())) => [1]
     * Stream.of(1, 2).collapse((f, p, c) -> f < c, Collectors.summingInt(Fn.unboxI())) => [3]
     * Stream.of(1, 2, 3).collapse((f, p, c) -> f < c, Collectors.summingInt(Fn.unboxI())) => [6]
     * Stream.of(1, 2, 3, 3, 2, 1).collapse((f, p, c) -> f < c, Collectors.summingInt(Fn.unboxI())) => [11, 1]
     * </code>
     * </pre>
* * @param collapsible a TriPredicate that takes three parameters: the first element of the series, the previous element and the current element in the stream. * @param collector a Collector that collects the elements into a single result container. * @return a new Stream where each element is the result of collapsing adjacent elements which satisfy the given predicate. */ @Beta @SequentialOnly @IntermediateOp public abstract Stream collapse(final TriPredicate collapsible, final Collector collector); /** * Performs a scan (also known as prefix sum, cumulative sum, running total, or integral) operation on the elements of the stream. * The scan operation takes a binary operator (the accumulator) and applies it cumulatively on the stream elements, * successively combining each element in order from the start to produce a stream of accumulated results. * * For example, given a stream of numbers [1, 2, 3, 4], and an accumulator that performs addition, * the output would be a stream of numbers [1, 3, 6, 10]. * * This is an intermediate operation. * This operation is sequential only, even when called on a parallel stream. * *

     * Example:
     * <pre>
     * <code>
     * Stream.of(new Integer[0]).scan((r, c) -> r + c) => []
     * Stream.of(1).scan((r, c) -> r + c) => [1]
     * Stream.of(1, 2).scan((r, c) -> r + c) => [1, 3]
     * Stream.of(1, 2, 3).scan((r, c) -> r + c) => [1, 3, 6]
     * Stream.of(1, 2, 3, 3, 2, 1).scan((r, c) -> r + c) => [1, 3, 6, 9, 11, 12]
     * </code>
     * </pre>
* * @param accumulator a {@code BiFunction} that takes two parameters: the current accumulated value and the current stream element, and returns a new accumulated value. * @return a new {@code Stream} consisting of the results of the scan operation on the elements of the original stream. */ @SequentialOnly @IntermediateOp public abstract Stream scan(final BiFunction accumulator); /** * Performs a scan (also known as prefix sum, cumulative sum, running total, or integral) operation on the elements of the stream. * The scan operation takes an initial value and a binary operator (the accumulator) and applies it cumulatively on the stream elements, * successively combining each element in order from the start to produce a stream of accumulated results. * * For example, given a stream of numbers [1, 2, 3, 4], an initial value of 10, and an accumulator that performs addition, * the output would be a stream of numbers [11, 13, 16, 20]. * This is an intermediate operation. * This operation is sequential only, even when called on a parallel stream. * *

     * Example:
     * <pre>
     * <code>
     * Stream.of(new Integer[0]).scan(10, (r, c) -> r + c) => []
     * Stream.of(1).scan(10, (r, c) -> r + c) => [11]
     * Stream.of(1, 2).scan(10, (r, c) -> r + c) => [11, 13]
     * Stream.of(1, 2, 3).scan(10, (r, c) -> r + c) => [11, 13, 16]
     * Stream.of(1, 2, 3, 3, 2, 1).scan(10, (r, c) -> r + c) => [11, 13, 16, 19, 21, 22]
     * </code>
     * </pre>
* * @param init the initial value. It's only used once by the accumulator to calculate the first element in the returned stream. * It will be ignored if this stream is empty and won't be the first element of the returned stream. * @param accumulator a {@code BiFunction} that takes two parameters: the current accumulated value and the current stream element, and returns a new accumulated value. * @return a new {@code Stream} consisting of the results of the scan operation on the elements of the original stream. * @see #scan(Object, boolean, BiFunction) */ @SequentialOnly @IntermediateOp public abstract Stream scan(final U init, final BiFunction accumulator); /** * Performs a scan (also known as prefix sum, cumulative sum, running total, or integral) operation on the elements of the stream. * The scan operation takes an initial value and a binary operator (the accumulator) and applies it cumulatively on the stream elements, * successively combining each element in order from the start to produce a stream of accumulated results. * *

     * Example:
     * <pre>
     * <code>
     * Stream.of(new Integer[0]).scan(10, false, (r, c) -> r + c) => []
     * Stream.of(new Integer[0]).scan(10, true, (r, c) -> r + c) => [10]
     * Stream.of(1, 2, 3).scan(10, false, (r, c) -> r + c) => [11, 13, 16]
     * Stream.of(1, 2, 3).scan(10, true, (r, c) -> r + c) => [10, 11, 13, 16]
     * </code>
     * </pre>
* * This is an intermediate operation. * This operation is sequential only, even when called on a parallel stream. * * @param init the initial value. It's only used once by the accumulator to calculate the first element in the returned stream. * @param initIncluded a boolean value that determines if the initial value should be included as the first element in the returned stream. * @param accumulator a {@code BiFunction} that takes two parameters: the current accumulated value and the current stream element, and returns a new accumulated value. * @return a new {@code Stream} consisting of the results of the scan operation on the elements of the original stream. * @see #scan(Object, BiFunction) */ @SequentialOnly @IntermediateOp public abstract Stream scan(final U init, final boolean initIncluded, final BiFunction accumulator); // @SequentialOnly // @IntermediateOp // public Stream scanInclusive(final U init, final BiFunction accumulator) { // return scan(init, accumulator, true); // } /** * Returns a Stream of Lists, where each List contains a chunk of elements from the original stream. * The size of each chunk is specified by the chunkSize parameter. The final chunk may be smaller if there are not enough elements. *
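     * A hedged usage sketch (the input values and resulting chunks below are illustrative assumptions):
     * <pre>
     * <code>
     * Stream.of(1, 2, 3, 4, 5).split(2) => [[1, 2], [3, 4], [5]]
     * </code>
     * </pre>
     *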
     * This method only runs sequentially, even in parallel stream.
     *
     * @param chunkSize the desired size of each chunk (the last chunk may be smaller)
     * @return a Stream of Lists, each containing a chunk of elements from the original stream
     * @see #split(int, IntFunction)
     */
    @SequentialOnly
    @IntermediateOp
    public abstract Stream<List<T>> split(final int chunkSize);

    //    /**
    //     * Returns Stream of Stream with consecutive sub streams of the elements, each of the same size (the final stream may be smaller).
    //     *
    //     *
    //     * This method only runs sequentially, even in parallel stream.
    //     *
    //     * @param chunkSize the desired size of each sub stream (the last may be smaller).
    //     * @return a Stream of Sets, each containing a chunk of elements from the original stream
    //     * @see #split(int, IntFunction)
    //     */
    //    @SequentialOnly
    //    @IntermediateOp
    //    public abstract Stream<Set<T>> splitToSet(int chunkSize);

    /**
     * Returns a Stream of Collections, where each Collection contains a chunk of elements from the original stream.
     * The size of each chunk is specified by the chunkSize parameter. The final chunk may be smaller if there are not enough elements.
     *
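     * A hedged usage sketch (LinkedHashSet is chosen here purely for illustration):
     * <pre>
     * <code>
     * Stream.of(1, 2, 3, 4, 5).split(3, LinkedHashSet::new) => [[1, 2, 3], [4, 5]]
     * </code>
     * </pre>
     *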
     * This method only runs sequentially, even in parallel stream.
     *
     * @param <C> the type of the Collection to be returned
     * @param chunkSize the desired size of each chunk (the last chunk may be smaller)
     * @param collectionSupplier a function that provides a new collection of type C
     * @return a Stream of Collections, each containing a chunk of elements from the original stream
     */
    @SequentialOnly
    @IntermediateOp
    public abstract <C extends Collection<T>> Stream<C> split(int chunkSize, IntFunction<? extends C> collectionSupplier);

    /**
     * Splits the elements of the stream into sub-streams of the specified size, each collected by the provided collector.
     *
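     * A hedged usage sketch reusing the collector idiom from the collapse examples above (input values are assumptions):
     * <pre>
     * <code>
     * Stream.of(1, 2, 3, 4, 5).split(2, Collectors.summingInt(Fn.unboxI())) => [3, 7, 5]
     * </code>
     * </pre>
     *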
* This method only runs sequentially, even in parallel stream. * * @param The type of the result produced by the collector. * @param chunkSize The desired size of each sub stream (the last may be smaller). * @param collector The collector to be used for collecting elements of each sub stream. * @return A new Stream consisting of results produced by the collector for each sub stream. */ @SequentialOnly @IntermediateOp public abstract Stream split(int chunkSize, Collector collector); // /** // * Splits the elements of the stream into sets based on the provided predicate. // * This is an intermediate operation and can only be processed sequentially. // * // * @implSpec // * {@code Stream.range(0, 7).split(it -> it % 3 == 0) ==> [[0], [1, 2], [3], [4, 5], [6]]} // *
// * {@code Stream.of("a1", "a2", "b1", "b2").split(it -> it.startsWith("a")) ==> [[a1, a2], [b1, b2]]} // * // * @param predicate The condition to be used for splitting the stream into sets. // * @return A new Stream consisting of sets of elements from the original Stream. // */ // @SequentialOnly // @IntermediateOp // public abstract Stream> splitToSet(Predicate predicate); /** * Splits the elements of the stream into sub-streams based on the provided predicate. * Each sub-stream is collected into a List. *
* This method only runs sequentially, even in parallel stream. * * @implSpec * {@code Stream.range(0, 7).split(it -> it % 3 == 0) ==> [[0], [1, 2], [3], [4, 5], [6]]} *
* {@code Stream.of("a1", "a2", "b1", "b2").split(it -> it.startsWith("a")) ==> [[a1, a2], [b1, b2]]} * * @param predicate The condition to be used for splitting the stream into sub-streams. * @return A new Stream consisting of Lists of elements from the original Stream. */ @SequentialOnly @IntermediateOp public abstract Stream> split(final Predicate predicate); /** * Splits the elements of the stream into collections based on the provided predicate. *
* This method only runs sequentially, even in parallel stream. * * @implSpec * {@code Stream.range(0, 7).split(it -> it % 3 == 0) ==> [[0], [1, 2], [3], [4, 5], [6]]} *
* {@code Stream.of("a1", "a2", "b1", "b2").split(it -> it.startsWith("a")) ==> [[a1, a2], [b1, b2]]} * * @param The type of the collection to be used for collecting elements of each sub stream. * @param predicate The condition to be used for splitting the stream into collections. * @param collectionSupplier The supplier function to provide a new collection for each sub stream. * @return A new Stream consisting of collections of elements from the original Stream. */ @SequentialOnly @IntermediateOp public abstract > Stream split(Predicate predicate, Supplier collectionSupplier); /** * Splits the elements of the stream into separate streams based on the provided predicate. *
* This method only runs sequentially, even in parallel stream. * * @implSpec * {@code Stream.range(0, 7).split(it -> it % 3 == 0) ==> [[0], [1, 2], [3], [4, 5], [6]]} *
* {@code Stream.of("a1", "a2", "b1", "b2").split(it -> it.startsWith("a")) ==> [[a1, a2], [b1, b2]]} * * @param The type of the result produced by the collector. * @param predicate The condition to be used for splitting the stream. * @param collector The collector to be used for collecting elements of each chunk. * @return A new Stream consisting of elements from the original Stream collected by the provided collector. */ @SequentialOnly @IntermediateOp public abstract Stream split(Predicate predicate, Collector collector); /** * Splits the stream into two parts at the specified index. * The element at the specified index will be the first element of the second part * The first part will be loaded into memory. * *
* This method only runs sequentially, even in parallel stream. * * @implSpec * {@code Stream.range(0, 7).splitAt(3) ==> [[0, 1, 2], [3, 4, 5, 6]]} * * * @param where The index at which to split the stream. * @return A new Stream consisting of two sub-streams split at the given index. */ @SequentialOnly @IntermediateOp public abstract Stream> splitAt(int where); /** * Splits the stream into two parts at the specified index. * The element at the specified index will be the first element of the second part * The first part will be loaded into memory. * * This is an intermediate operation and can only be processed sequentially. * * @implSpec * {@code Stream.range(0, 7).splitAt(3) ==> [[0, 1, 2], [3, 4, 5, 6]]} * * @param The type of the result produced by the collector. * @param where The index at which to split the stream. The element at this index will be the first element of the second part. * @param collector The collector to be used for collecting elements of the first part of the stream. * @return A new Stream consisting of elements from the first part of the original Stream collected by the provided collector. */ @SequentialOnly @IntermediateOp public abstract Stream splitAt(int where, Collector collector); /** * Splits the stream into two parts based on the specified predicate. * The first part will be loaded into memory. *
* This method only runs sequentially, even in parallel stream. * * @implSpec * {@code Stream.range(0, 7).splitAt(it -> it == 4) ==> [[0, 1, 2, 3], [4, 5, 6]]} * @param where The predicate to determine where to split the stream. * @return A new Stream consisting of two sub-streams split at the point where the predicate is satisfied. */ @SequentialOnly @IntermediateOp public abstract Stream> splitAt(final Predicate where); /** * Splits the stream into two parts based on the provided predicate. The first part will be loaded into memory. *
* This method only runs sequentially, even in parallel stream. * * @implSpec * {@code Stream.range(0, 7).splitAt(it -> it == 4) ==> [[0, 1, 2, 3], [4, 5, 6]]} * * @param The type of the result produced by the collector. * @param where The condition at which to split the stream. * @param collector The collector to be used for collecting elements of the first part of the stream. * @return A new Stream consisting of elements from the first part of the original Stream collected by the provided collector. */ @SequentialOnly @IntermediateOp public abstract Stream splitAt(Predicate where, Collector collector); // /** // * Creates a sliding window over the elements of the Stream, where each window is a Set of elements. // * The window moves over the elements of the Stream according to the specified window size. // * This is an intermediate operation and can only be processed sequentially. // * // * @param windowSize The size of the window to be used for sliding over the Stream elements. // * @return A new Stream where each element is a Set of elements from the original Stream, representing a window. // * @see #sliding(int, int) // */ // @SequentialOnly // @IntermediateOp // public Stream> slidingToSet(final int windowSize) { // return slidingToSet(windowSize, 1); // } // /** // * Creates a sliding window over the elements of the Stream, where each window is a Set of elements. // * The window moves over the elements of the Stream according to the specified window size and increment. // * This is an intermediate operation and can only be processed sequentially. // * // * @param windowSize The size of the window to be used for sliding over the Stream elements. // * @param increment The number of elements to move the window by each time. // * @return A new Stream where each element is a Set of elements from the original Stream, representing a window. // * @see #sliding(int, int) // */ // @SequentialOnly // @IntermediateOp // public abstract Stream> slidingToSet(int windowSize, int increment); /** * Creates a sliding window over the elements of the Stream, where each window is a List of elements. * The window moves over the elements of the Stream according to the specified window size. *
     * This method only runs sequentially, even in parallel stream.
     *
     * Example:
     * <pre>
     * <code>
     * Stream.of(1, 2, 3, 4, 5).sliding(3) => [[1, 2, 3], [2, 3, 4], [3, 4, 5]]
     * </code>
     * </pre>
* * @param windowSize The size of the window to be used for sliding over the Stream elements. * @return A new Stream where each element is a List of elements from the original Stream, representing a window. */ @SequentialOnly @IntermediateOp public abstract Stream> sliding(final int windowSize); /** * Creates a sliding window over the elements of the Stream, where each window is a collection of elements. * The window moves over the elements of the Stream according to the specified window size. *
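     * For example, a possible usage (illustrative; {@code LinkedHashSet} is just one choice of Collection):
     * {@code Stream.of(1, 2, 3, 4).sliding(2, size -> new LinkedHashSet<>()) ==> [[1, 2], [2, 3], [3, 4]]}
     *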
* This method only runs sequentially, even in parallel stream. * * @param windowSize The size of the window to be used for sliding over the Stream elements. * @param collectionSupplier The function to create a new collection for each window. * @return A new Stream where each element is a collection of elements from the original Stream, representing a window. * @see #sliding(int, int) */ @SequentialOnly @IntermediateOp public abstract > Stream sliding(int windowSize, IntFunction collectionSupplier); // /** // * Creates a sliding window over the elements of the Stream, where each window is a Set of elements. // * The window moves over the elements of the Stream according to the specified window size. // * This is an intermediate operation and can only be processed sequentially. // * // * @param windowSize The size of the window to be used for sliding over the Stream elements. // * @return A new Stream where each element is a Set of elements from the original Stream, representing a window. // * @see #sliding(int, int) // */ // @SequentialOnly // @IntermediateOp // public Stream> slidingToSet(final int windowSize) { // return slidingToSet(windowSize, 1); // } // /** // * Creates a sliding window over the elements of the Stream, where each window is a Set of elements. // * The window moves over the elements of the Stream according to the specified window size and increment. // * This is an intermediate operation and can only be processed sequentially. // * // * @param windowSize The size of the window to be used for sliding over the Stream elements. // * @param increment The number of elements to move the window by each time. // * @return A new Stream where each element is a Set of elements from the original Stream, representing a window. // * @see #sliding(int, int) // */ // @SequentialOnly // @IntermediateOp // public abstract Stream> slidingToSet(int windowSize, int increment); /** * Creates a sliding window over the elements of the Stream, where each window is a collection of elements. * The window moves over the elements of the Stream according to the specified window size. * The elements in each window are then collected into a result container using the provided Collector. *
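     * For example, a possible usage (illustrative; {@code Collectors} here refers to {@code java.util.stream.Collectors}):
     * {@code Stream.of(1, 2, 3, 4).sliding(3, Collectors.summingInt(Integer::intValue)) ==> [6, 9]}
     *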
* This method only runs sequentially, even in parallel stream. * * @param windowSize The size of the window to be used for sliding over the Stream elements. * @param collector The collector to be used for reduction of the elements in each window. * @return A new Stream where each element is a result container of the collected elements from the original Stream, representing a window. * @see #sliding(int, int) */ @SequentialOnly @IntermediateOp public abstract Stream sliding(int windowSize, Collector collector); // /** // * Creates a sliding window over the elements of the Stream, where each window is a Set of elements. // * The window moves over the elements of the Stream according to the specified window size. // * This is an intermediate operation and can only be processed sequentially. // * // * @param windowSize The size of the window to be used for sliding over the Stream elements. // * @return A new Stream where each element is a Set of elements from the original Stream, representing a window. // * @see #sliding(int, int) // */ // @SequentialOnly // @IntermediateOp // public Stream> slidingToSet(final int windowSize) { // return slidingToSet(windowSize, 1); // } // /** // * Creates a sliding window over the elements of the Stream, where each window is a Set of elements. // * The window moves over the elements of the Stream according to the specified window size and increment. // * This is an intermediate operation and can only be processed sequentially. // * // * @param windowSize The size of the window to be used for sliding over the Stream elements. // * @param increment The number of elements to move the window by each time. // * @return A new Stream where each element is a Set of elements from the original Stream, representing a window. // * @see #sliding(int, int) // */ // @SequentialOnly // @IntermediateOp // public abstract Stream> slidingToSet(int windowSize, int increment); /** * Creates a sliding window over the elements of the Stream, where each window is a List of elements. * The window moves over the elements of the Stream according to the specified window size and increment. * This is an intermediate operation and can only be processed sequentially. * *

     * Example:
     * <pre>
     * <code>
     * Stream.of(1, 2, 3, 4, 5).sliding(3, 2) => [[1, 2, 3], [3, 4, 5]]
     * </code>
     * </pre>
* * @param windowSize The size of the window to be used for sliding over the Stream elements. * @param increment The number of elements to move the window by each time. * @return A new Stream where each element is a List of elements from the original Stream, representing a window. */ @SequentialOnly @IntermediateOp public abstract Stream> sliding(final int windowSize, final int increment); /** * Creates a sliding window over the elements of the Stream, where each window is a collection of elements. * The window moves over the elements of the Stream according to the specified window size and increment. *
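     * For example, a possible usage (illustrative; {@code LinkedHashSet} is just one choice of Collection):
     * {@code Stream.of(1, 2, 3, 4, 5).sliding(3, 2, size -> new LinkedHashSet<>()) ==> [[1, 2, 3], [3, 4, 5]]}
     *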
* This method only runs sequentially, even in parallel stream. * * @param windowSize The size of the window to be used for sliding over the Stream elements. * @param increment The number of elements to move the window by each time. * @param collectionSupplier The function to create a new collection for each window. * @return A new Stream where each element is a collection of elements from the original Stream, representing a window. * @see #sliding(int, int) */ @SequentialOnly @IntermediateOp public abstract > Stream sliding(int windowSize, int increment, IntFunction collectionSupplier); /** * Creates a sliding window over the elements of the Stream, where each window is a collection of elements. * The window moves over the elements of the Stream according to the specified window size and increment. * The elements in each window are then collected into a result container using the provided Collector. *
* This method only runs sequentially, even in parallel stream. * * @param windowSize The size of the window to be used for sliding over the Stream elements. * @param increment The number of elements to move the window by each time. * @param collector The collector to be used for reduction of the elements in each window. * @return A new Stream where each element is a result container of the collected elements from the original Stream, representing a window. * @see #sliding(int, int) */ @SequentialOnly @IntermediateOp public abstract Stream sliding(int windowSize, int increment, Collector collector); /** * Intersperses the given delimiter between the elements of the stream. * *
* This method only runs sequentially, even in parallel stream. * *
     * For example:
     * <pre>
     * <code>
     * Stream.of(1).intersperse(9) --> [1]
     * Stream.of(1, 2, 3).intersperse(9) --> [1, 9, 2, 9, 3]
     * </code>
     * </pre>
* * @param delimiter The element to be inserted between each element of the stream. * @return A new Stream with the delimiter interspersed between each element. */ @SequentialOnly @IntermediateOp public abstract Stream intersperse(T delimiter); /** * Returns a stream consisting of the distinct elements of this stream, * where the duplicates are merged using the provided merge function. * This is an intermediate operation. * * @param mergeFunction a binary operator used to merge duplicate elements * @return a new Stream consisting of the distinct elements of this stream */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public Stream distinct(final BinaryOperator mergeFunction) { // ConcurrentHashMap is not required for parallel stream and it doesn't support null key. // final Supplier> supplier = isParallel() ? Suppliers. ofConcurrentHashMap() : Suppliers. ofLinkedHashMap(); if (isParallel()) { final Supplier> supplier = Suppliers.ofMap(); //noinspection resource return groupBy(Fn.identity(), Fn.identity(), mergeFunction, supplier) // .sequential() .map(Fn.value()) .parallel(maxThreadNum(), executorNumForVirtualThread(), splitor(), asyncExecutor(), cancelUncompletedThreads()); } else { final Supplier> supplier = Suppliers.ofLinkedHashMap(); //noinspection resource return groupBy(Fn.identity(), Fn.identity(), mergeFunction, supplier).map(Fn.value()); } } // /** // * Returns a stream consisting of the distinct elements of this stream. // * And only keep the elements whose occurrences are satisfied the specified predicate. // * // *
// * @implNote Equivalent to: {@code countBy(Fnn.identity(), supplier).filter(predicate).map(Fn.key())}. // * // * @param occurrencesFilter the predicate to apply to the count of occurrences of each element // * @return a new CheckedStream containing distinct elements // * @throws IllegalStateException if the stream is already closed // */ // @SequentialOnly // @IntermediateOp // @TerminalOpTriggered // public Stream distinct(final Predicate occurrencesFilter) { // // ConcurrentHashMap is not required for parallel stream and it doesn't support null key. // // final Supplier> supplier = isParallel() ? Suppliers. ofConcurrentHashMap() : Suppliers. ofLinkedHashMap(); // // final Supplier> supplier = Suppliers. ofLinkedHashMap(); // // if (isParallel()) { // return newStream(sequential() // // .countBy(Fn. identity(), supplier) // .filter(Fn. testByValue(occurrencesFilter)) // // .map(Fn. key()) // .iteratorEx(), sorted, cmp); // } else { // return newStream(countBy(Fn. identity(), supplier) // // .filter(Fn. testByValue(occurrencesFilter)) // .map(Fn. key()) // .iteratorEx(), sorted, cmp); // } // } /** * Returns a stream consisting of the distinct elements of this stream, * where the distinctness is determined by the value mapped from the given key mapper function. * This is an intermediate operation. * * @param keyMapper a function to extract the key for comparison * @return a new Stream consisting of the distinct elements of this stream */ @ParallelSupported @IntermediateOp public abstract Stream distinctBy(Function keyMapper); /** * Returns a stream consisting of the distinct elements of this stream, * where the distinctness is determined by the value mapped from the given key mapper function. * Duplicates are merged using the provided merge function. * This is an intermediate operation. * * @param keyMapper a function to extract the key for comparison * @param mergeFunction a binary operator used to merge duplicate elements * @return a new Stream consisting of the distinct elements of this stream */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public Stream distinctBy(final Function keyMapper, final BinaryOperator mergeFunction) { // ConcurrentHashMap is not required for parallel stream and it doesn't support null key. // final Supplier> supplier = isParallel() ? Suppliers. ofConcurrentHashMap() : Suppliers. ofLinkedHashMap(); if (isParallel()) { final Supplier> supplier = Suppliers.ofMap(); //noinspection resource return groupBy(keyMapper, Fn.identity(), mergeFunction, supplier) // .sequential() .map(Fn.value()) .parallel(maxThreadNum(), executorNumForVirtualThread(), splitor(), asyncExecutor(), cancelUncompletedThreads()); } else { final Supplier> supplier = Suppliers.ofLinkedHashMap(); //noinspection resource return groupBy(keyMapper, Fn.identity(), mergeFunction, supplier).map(Fn.value()); } } // /** // * Returns a Stream consisting of the distinct elements (according to the keyMapper function) of this stream. // * The elements are distinct according to {@link java.util.Objects#equals(Object, Object)} of the keys produced by the provided {@code keyMapper} function. // * The occurrencesFilter is applied to each distinct element in the form of a Map.Entry where the key is the distinct element and the value is the number of occurrences of that element in the original stream. // * Only the elements for which the {@code occurrencesFilter} returns {@code true} are included in the resulting stream. 
// * // * @param keyMapper a non-interfering, stateless function to apply to each element to determine its key for distinctness // * @param occurrencesFilter a non-interfering, stateless predicate to apply to each distinct element to determine if it should be included // * @return a new stream that contains distinct elements (according to the keyMapper function) of the original stream that satisfy the occurrencesFilter // */ // @ParallelSupported // @IntermediateOp // @TerminalOpTriggered // public Stream distinctBy(final Function keyMapper, final Predicate, Integer>> occurrencesFilter) { // // ConcurrentHashMap is not required for parallel stream and it doesn't support null key. // // final Supplier, Long>> supplier = isParallel() ? Suppliers., Long> ofConcurrentHashMap() // // : Suppliers., Long> ofLinkedHashMap(); // // if (isParallel()) { // final Supplier, Integer>> supplier = Suppliers., Integer> ofMap(); // // return countBy(Fn. keyed(keyMapper), supplier) // // .sequential() // .filter(occurrencesFilter) // .map(Fn. kkv()) // .parallel(maxThreadNum(), executorNumForVirtualThread(), splitor(), asyncExecutor(), cancelUncompletedThreads()); // } else { // final Supplier, Integer>> supplier = Suppliers., Integer> ofLinkedHashMap(); // // return countBy(Fn. keyed(keyMapper), supplier) // // .filter(occurrencesFilter) // .map(Fn. kkv()); // } // } // /** // * Distinct by the key extracted by {@code keyMapper} and limit the appearance of the elements with same key to the number calculated by {@code limit} // * // * @param keyMapper // * @param limit // * @return // * @see #groupBy(Function, Collector) // */ // @Beta // @ParallelSupported // @IntermediateOp // @TerminalOpTriggered // public Stream distinctLimitBy(final Function keyMapper, final BiFunction, Integer> limit) { // // ConcurrentHashMap is not required for parallel stream and it doesn't support null key. // // final Supplier, Long>> supplier = isParallel() ? Suppliers., Long> ofConcurrentHashMap() // // : Suppliers., Long> ofLinkedHashMap(); // // if (isParallel()) { // return groupBy(keyMapper) // // .sequential() // .flatmap(it -> subList(it.getValue(), 0, limit.apply(it.getKey(), it.getValue()))) // .parallel(maxThreadNum(), executorNumForVirtualThread(), splitor(), asyncExecutor(), cancelUncompletedThreads()); // } else { // final Supplier>> supplier = Suppliers.> ofLinkedHashMap(); // // return groupBy(keyMapper, Fn.identity(), supplier) // // .flatmap(it -> subList(it.getValue(), 0, limit.apply(it.getKey(), it.getValue()))); // } // } /** * Returns a Stream consisting of the elements of this stream, sorted according to the provided Comparator. * This is an intermediate operation. * * @param comparator a {@code non-null} Comparator to be used to compare stream elements * @return a new stream that contains the elements of the original stream, sorted according to the provided Comparator */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream sorted(Comparator comparator); /** * Returns a Stream consisting of the elements of this stream, sorted according to the natural order of the keys produced by the provided keyMapper function. * Nulls are considered smaller than other values in the natural order. * This is an intermediate operation. 
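     * For example, a possible usage (illustrative):
     * {@code Stream.of("banana", "fig", "apple").sortedBy(String::length) ==> [fig, apple, banana]}
     *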
* * @param keyMapper a non-interfering, stateless function to apply to each element to determine its key for sorting * @return a new stream that contains the elements of the original stream, sorted according to the natural order of the keys produced by the keyMapper function * @see Comparators#comparingBy(Function) */ @SuppressWarnings("rawtypes") @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream sortedBy(Function keyMapper); /** * Returns a Stream consisting of the elements of this stream, * sorted according to the natural order of the integer values produced by the provided keyMapper function. * This is an intermediate operation. * * @param keyMapper a function to extract the key for sorting * @return a new Stream consisting of the elements of this stream, sorted by the keys produced by the keyMapper function * @see Comparators#comparingInt(ToIntFunction) */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream sortedByInt(ToIntFunction keyMapper); /** * Returns a Stream consisting of the elements of this stream, * sorted according to the natural order of the long values produced by the provided keyMapper function. * This is an intermediate operation. * * @param keyMapper a function to extract the key for sorting * @return a new Stream consisting of the elements of this stream, sorted by the keys produced by the keyMapper function * @see Comparators#comparingLong(ToLongFunction) */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream sortedByLong(ToLongFunction keyMapper); /** * Returns a Stream consisting of the elements of this stream, * sorted according to the natural order of the double values produced by the provided keyMapper function. * This is an intermediate operation. * * @param keyMapper a function to extract the key for sorting * @return a new Stream consisting of the elements of this stream, sorted by the keys produced by the keyMapper function * @see Comparators#comparingDouble(ToDoubleFunction) */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream sortedByDouble(ToDoubleFunction keyMapper); /** * Returns a Stream consisting of the elements of this stream, sorted in reverse order according to the provided Comparator. * This is an intermediate operation. * * @param comparator a {@code non-null} Comparator to be used to compare stream elements in reverse order * @return a new stream that contains the elements of the original stream, sorted in reverse order according to the provided Comparator * @see Comparators#reverseOrder(Comparator) */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream reverseSorted(Comparator comparator); /** * Returns a Stream consisting of the elements of this stream, sorted in reverse order according to the natural order of the integer values produced by the provided keyMapper function. * This is an intermediate operation. * * @param keyMapper a function to extract the key for sorting * @return a new stream that contains the elements of the original stream, sorted in reverse order according to the natural order of the keys produced by the keyMapper function * @see Comparators#reversedComparingInt(ToIntFunction) */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream reverseSortedByInt(ToIntFunction keyMapper); /** * Returns a Stream consisting of the elements of this stream, sorted in reverse order according to the natural order of the long values produced by the provided keyMapper function. 
* This is an intermediate operation. * * @param keyMapper a function to extract the key for sorting * @return a new stream that contains the elements of the original stream, sorted in reverse order according to the natural order of the keys produced by the keyMapper function * @see Comparators#reversedComparingLong(ToLongFunction) */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream reverseSortedByLong(ToLongFunction keyMapper); /** * Returns a Stream consisting of the elements of this stream, sorted in reverse order according to the natural order of the double values produced by the provided keyMapper function. * This is an intermediate operation. * * @param keyMapper a function to extract the key for sorting * @return a new stream that contains the elements of the original stream, sorted in reverse order according to the natural order of the keys produced by the keyMapper function * @see Comparators#reversedComparingDouble(ToDoubleFunction) */ @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream reverseSortedByDouble(ToDoubleFunction keyMapper); /** * Returns a Stream consisting of the elements of this stream, sorted in reverse order according to the natural order of the keys produced by the provided keyMapper function. * Nulls are considered bigger than other values in the reverse order. * This is an intermediate operation. * * @param keyMapper a function to apply to each element to determine its key for sorting * @return a new stream that contains the elements of the original stream, sorted in reverse order according to the natural order of the keys produced by the keyMapper function * @see Comparators#reversedComparingBy(Function) */ @SuppressWarnings("rawtypes") @ParallelSupported @IntermediateOp @TerminalOpTriggered public abstract Stream reverseSortedBy(Function keyMapper); /** * Returns a Stream consisting of the top n elements of this stream, according to the natural order of the elements. * This is an intermediate operation and this method only runs sequentially, even in parallel stream. * * @param n the number of top elements to select from the stream * @return a new stream that contains the top n elements of the original stream * @throws IllegalArgumentException if n is less than zero */ @SequentialOnly @IntermediateOp public abstract Stream top(int n); /** * Returns a Stream consisting of the top n elements of this stream, sorted according to the provided Comparator. * This is an intermediate operation and this method only runs sequentially, even in parallel stream. * * @param n the number of top elements to select from the stream * @param comparator a {@code non-null} Comparator to be used to compare stream elements * @return a new stream that contains the top n elements of the original stream, sorted according to the provided Comparator */ @SequentialOnly @IntermediateOp public abstract Stream top(int n, Comparator comparator); /** * Skips a range of elements in the stream. * This is an intermediate operation and this method only runs sequentially, even in parallel stream. 
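     * For example, a possible usage (illustrative, assuming zero-based positions):
     * {@code Stream.of("a", "b", "c", "d", "e").skipRange(1, 3) ==> [a, d, e]}
     *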
* * @param startInclusive the first position in the range to skip, inclusive * @param endExclusive the last position in the range to skip, exclusive * @return a new stream that skips the specified range of the original stream */ @SequentialOnly @IntermediateOp public abstract Stream skipRange(int startInclusive, int endExclusive); // /** // * // * // * @return // * @deprecated Use {@link #skipNulls()} instead // */ // @Deprecated // @SequentialOnly // @IntermediateOp // public abstract Stream skipNull(); /** * Returns a Stream consisting of the elements of this stream, excluding {@code null} elements. * This is an intermediate operation and this method only runs sequentially, even in parallel stream. * * @return a new stream that excludes {@code null} elements of the original stream */ @SequentialOnly @IntermediateOp public abstract Stream skipNulls(); /** * Returns a Stream consisting of the elements of this stream, excluding the last n elements. * This is an intermediate operation and this method only runs sequentially, even in parallel stream. * A queue with size up to n will be maintained to filter out the last n elements. * It may cause out of memory error if n is big enough. * * @param n the number of elements from the end of the stream to skip * @return a new stream that excludes the last n elements of the original stream */ @SequentialOnly @IntermediateOp public abstract Stream skipLast(int n); /** * Returns a Stream consisting of the last n elements of this stream. * This is an intermediate operation and this method only runs sequentially, even in parallel stream. * A queue with size up to n will be maintained to filter out the last n elements. * It may cause out of memory error if n is big enough. * All the elements will be loaded to get the last n elements and the Stream will be closed after that, if a terminal operation is triggered. * * @param n the number of elements from the end of the stream to include * @return a new stream that includes the last n elements of the original stream */ @Beta @SequentialOnly @IntermediateOp public abstract Stream last(int n); // // This is not frequently used method. it should not be defined as an api. // @Beta // @SequentialOnly // @IntermediateOp // public Stream> timed() { // if (isParallel()) { // return sequential().map(Timed::of).parallel(maxThreadNum(), executorNumForVirtualThread(), splitor(), asyncExecutor(), cancelUncompletedThreads()); // } else { // return map(Timed::of); // } // } /** * Performs the provided action on the first element of the stream as it is consumed. * This is an intermediate operation and this method only runs sequentially, even in parallel stream. * * @param action a non-interfering action to perform on the first element of the stream * @return a new stream that includes the action on the first element */ @Beta @SequentialOnly @IntermediateOp public abstract Stream onFirst(Consumer action); /** * Performs the provided action on the last element of the stream as it is consumed. * This is an intermediate operation and this method only runs sequentially, even in parallel stream. * * @param action a non-interfering action to perform on the last element of the stream * @return a new stream that includes the action on the last element */ @Beta @SequentialOnly @IntermediateOp public abstract Stream onLast(Consumer action); // Notes: onEachIf? bad smell? // /** // * Performs the provided action only on the elements of the stream that satisfy the given predicate as they are consumed. 
// * // * @implNote It's equivalent to: {@code onEach(Fn.acceptIf(predicate, action))}. // * // * @param predicate a non-interfering predicate to test on the elements of the stream // * @param action a non-interfering action to perform on the elements of the stream that satisfy the predicate // * @return a new stream that includes the action on the elements that satisfy the predicate // */ // @Beta // @ParallelSupported // @IntermediateOp // public Stream onEachIf(final Predicate predicate, final Consumer action) { // checkArgNotNull(predicate, cs.predicate); // checkArgNotNull(action, cs.action); // // return onEach(Fn.acceptIf(predicate, action)); // } /** * Performs the provided action on the first element of the stream as it is consumed. * This is an intermediate operation and this method only runs sequentially, even in parallel stream. * This method is useful for debugging and logging purposes. * * @param action a non-interfering action to perform on the first element of the stream * @return a new stream that includes the action on the first element * @see #onFirst(Consumer) */ @SequentialOnly @IntermediateOp public Stream peekFirst(final Consumer action) { return onFirst(action); } /** * Performs the provided action on the last element of the stream as it is consumed. * This is an intermediate operation and this method only runs sequentially, even in parallel stream. * This method is useful for debugging and logging purposes. * * @param action a non-interfering action to perform on the last element of the stream * @return a new stream that includes the action on the last element * @see #onLast(Consumer) */ @SequentialOnly @IntermediateOp public Stream peekLast(final Consumer action) { return onLast(action); } /** * Performs the provided action only on the elements pulled by downstream/terminal operation which satisfy the given predicate as they are consumed. * This method is useful for debugging and logging purposes. * * @param predicate a non-interfering predicate to test on the elements of the stream * @param action a non-interfering action to perform on the elements pulled by downstream/terminal operation which satisfy the given predicate as they are consumed * @return a new stream that includes the action on the elements that satisfy the predicate */ @Beta @ParallelSupported @IntermediateOp public Stream peekIf(final Predicate predicate, final Consumer action) { checkArgNotNull(predicate, cs.predicate); checkArgNotNull(action, cs.action); return peek(Fn.acceptIf(predicate, action)); } /** * Performs the provided action only on the elements pulled by downstream/terminal operation which satisfy the given predicate as they are consumed. * The predicate takes two parameters: the current stream element and the count of iterated elements (starts with 1). * This method is useful for debugging and logging purposes. 
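     * For example, a possible usage (illustrative; as described above, the count passed to the predicate starts at 1):
     * {@code Stream.of("a", "b", "c").peekIf((e, count) -> count % 2 == 1, System.out::println).toList() // prints "a" and "c"}
     *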
* * @param predicate a non-interfering predicate to test on the elements of the stream and their iteration count * @param action a non-interfering action to perform on the elements pulled by downstream/terminal operation which satisfy the given predicate as they are consumed * @return a new stream that includes the action on the elements that satisfy the predicate * @throws IllegalArgumentException if the predicate or action is null */ @Beta @ParallelSupported @IntermediateOp public Stream peekIf(final BiPredicate predicate, final Consumer action) throws IllegalArgumentException { checkArgNotNull(predicate, cs.predicate); checkArgNotNull(action, cs.action); if (isParallel()) { final AtomicLong count = new AtomicLong(); return peek(it -> { if (predicate.test(it, count.incrementAndGet())) { action.accept(it); } }); } else { final MutableLong count = MutableLong.of(0); return peek(it -> { if (predicate.test(it, count.incrementAndGet())) { action.accept(it); } }); } } // TODO First of all, it only works in sequential Stream, not parallel stream (and maybe not work in some other scenarios as well). // Secondly, these onErrorXXX methods make it more difficult and complicated to use Stream. // So, remove them. // /** // * This method should be only applied sequential {@code Stream} and whose up-streams are sequential {@code Streams} as well. // * Because error happening in the operations executed by parallel stream will stop iteration on that {@Stream}, so the down-streams won't be able to continue. // * // * @param errorConsumer // * @return // */ // @Beta // @SequentialOnly // @IntermediateOp // public abstract Stream onErrorContinue(Consumer errorConsumer); // // /** // * This method should be only applied sequential {@code Stream} and whose up-streams are sequential {@code Streams} as well. // * Because error happening in the operations executed by parallel stream will stop iteration on that {@Stream}, so the down-streams won't be able to continue. // * // * @param type // * @param errorConsumer // * @return // */ // @Beta // @SequentialOnly // @IntermediateOp // public abstract Stream onErrorContinue(Class type, Consumer errorConsumer); // // /** // * This method should be only applied sequential {@code Stream} and whose up-streams are sequential {@code Streams} as well. // * Because error happening in the operations executed by parallel stream will stop iteration on that {@Stream}, so the down-streams won't be able to continue. // * // * @param errorPredicate // * @param errorConsumer // * @return // */ // @Beta // @SequentialOnly // @IntermediateOp // public abstract Stream onErrorContinue(Predicate errorPredicate, Consumer errorConsumer); // // /** // * This method should be only applied sequential {@code Stream} and whose up-streams are sequential {@code Streams} as well. // * Because error happening in the operations executed by parallel stream will stop iteration on that {@Stream}, so the down-streams won't be able to continue. // * // * @param errorPredicate // * @param errorConsumer // * @param maxErrorCountToStop // * @return // */ // @Beta // @SequentialOnly // @IntermediateOp // public abstract Stream onErrorContinue(Predicate errorPredicate, Consumer errorConsumer, int maxErrorCountToStop); // // /** // * This method should be only applied sequential {@code Stream} and whose up-streams are sequential {@code Streams} as well. 
// * Because error happening in the operations executed by parallel stream will stop iteration on that {@Stream}, so the down-streams won't be able to continue. // * // * @param fallbackValue // * @return // */ // @Beta // @SequentialOnly // @IntermediateOp // public abstract Stream onErrorReturn(T fallbackValue); // // /** // * This method should be only applied sequential {@code Stream} and whose up-streams are sequential {@code Streams} as well. // * Because error happening in the operations executed by parallel stream will stop iteration on that {@Stream}, so the down-streams won't be able to continue. // * // * @param type // * @param fallbackValue // * @return // */ // @Beta // @SequentialOnly // @IntermediateOp // public abstract Stream onErrorReturn(Class type, T fallbackValue); // // /** // * This method should be only applied sequential {@code Stream} and whose up-streams are sequential {@code Streams} as well. // * Because error happening in the operations executed by parallel stream will stop iteration on that {@Stream}, so the down-streams won't be able to continue. // * // * @param predicate // * @param fallbackValue // * @return // */ // @Beta // @SequentialOnly // @IntermediateOp // public abstract Stream onErrorReturn(Predicate predicate, T fallbackValue); // // /** // * This method should be only applied sequential {@code Stream} and whose up-streams are sequential {@code Streams} as well. // * Because error happening in the operations executed by parallel stream will stop iteration on that {@Stream}, so the down-streams won't be able to continue. // * // * @param predicate // * @param supplierForFallbackValue // * @return // */ // @Beta // @SequentialOnly // @IntermediateOp // public abstract Stream onErrorReturn(Predicate predicate, Supplier supplierForFallbackValue); // // /** // * This method should be only applied sequential {@code Stream} and whose up-streams are sequential {@code Streams} as well. // * Because error happening in the operations executed by parallel stream will stop iteration on that {@Stream}, so the down-streams won't be able to continue. // * // * @param predicate // * @param mapperForFallbackValue // * @param maxErrorCountToStop // * @return // */ // @Beta // @SequentialOnly // @IntermediateOp // public abstract Stream onErrorReturn(Predicate predicate, Function mapperForFallbackValue, // int maxErrorCountToStop); // // /** // * // * @return // */ // @Beta // @SequentialOnly // @IntermediateOp // public abstract Stream onErrorStop(); /** * Performs the provided action for each element of the Stream. * The action is a Consumer functional interface which accepts a single input argument and returns no result. * * @param action a non-interfering action to perform on the elements */ @Beta @ParallelSupported @TerminalOp public void foreach(final Consumer action) { forEach(Fn.from(action)); } /** * Performs the provided action for each element of the Stream. * The action is a Consumer functional interface which accepts a single input argument and returns no result. * * @param action The action to be performed for each element * @throws E Exception thrown by the Consumer * @see java.util.function.Consumer * @see com.landawn.abacus.util.Throwables.Consumer */ @ParallelSupported @TerminalOp public abstract void forEach(Throwables.Consumer action) throws E; /** * Performs the provided action for each element of the Stream, where the action is an instance of Throwables.IntObjConsumer. 
* The action is a Consumer functional interface which accepts an index parameter and an element parameter. * * @param action The action to be performed for each element * @throws E Exception thrown by the Consumer * @see com.landawn.abacus.util.Throwables.IntObjConsumer */ @ParallelSupported @TerminalOp public abstract void forEachIndexed(Throwables.IntObjConsumer action) throws E; /** * Performs the provided action for each element of the Stream until the action sets the MutableBoolean to {@code true}. * Iteration on this stream will also be stopped when this flag is set to {@code true}. * * @param the type of exception that the action may throw * @param action the action to be performed for each element, which accepts an element and a MutableBoolean * @throws E if the action throws an exception * @see #forEachUntil(MutableBoolean, Throwables.Consumer) */ @Beta @ParallelSupported @TerminalOp public abstract void forEachUntil(Throwables.BiConsumer action) throws E; /** * Performs the provided action for each element of the Stream until the flagToBreak is set to {@code true}. * Iteration on this stream will also be stopped when the flagToBreak is set to {@code true}. * If the flagToBreak is set to {@code true} at the beginning, no elements will be iterated from the stream before it is stopped and closed. * * @param the type of exception that the action may throw * @param flagToBreak a flag to break the for-each loop. Set it to {@code true} to break the loop if you don't want to continue the action. * Iteration on this stream will also be stopped when this flag is set to {@code true}. * @param action the action to be performed for each element * @throws E if the action throws an exception * @see #forEachUntil(Throwables.BiConsumer) */ @Beta @ParallelSupported @TerminalOp public abstract void forEachUntil(MutableBoolean flagToBreak, Throwables.Consumer action) throws E; /** * Performs the provided action for each element of the Stream * After all elements have been processed by the Consumer, the provided Runnable is executed. * * @param action The action to be performed for each element * @param onComplete The Runnable to be executed after all elements have been processed * @throws E Exception thrown by the Consumer * @throws E2 Exception thrown by the Runnable * @see N#forEach(Iterable, Throwables.Consumer) */ @ParallelSupported @TerminalOp public abstract void forEach(Throwables.Consumer action, Throwables.Runnable onComplete) throws E, E2; /** * Transforms the elements in the stream by applying the flatMapper function to each element and then performs the provided action for each element of the Stream. * The action is a BiConsumer functional interface which accepts an element of the Stream and an element of the Iterable produced by the flatMapper function. 
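     * For example, a possible usage (illustrative):
     * {@code Stream.of(1, 2).forEach(i -> Arrays.asList(i, i * 10), (i, j) -> System.out.println(i + " -> " + j)) // prints: 1 -> 1, 1 -> 10, 2 -> 2, 2 -> 20}
     *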
* * @param The type of elements in the Iterable produced by the flatMapper function * @param The type of exception that can be thrown by the flatMapper function * @param The type of exception that can be thrown by the BiConsumer action * @param flatMapper The function to transform elements of the Stream into Iterables * @param action The action to be performed for each element of the Stream and each element of the Iterable produced by the flatMapper function * @throws E Exception thrown by the flatMapper function * @throws E2 Exception thrown by the BiConsumer action * @see N#forEach(Iterable, Throwables.Function, Throwables.BiConsumer) */ @ParallelSupported @TerminalOp public abstract void forEach(Throwables.Function, E> flatMapper, Throwables.BiConsumer action) throws E, E2; /** * Transforms the elements in the stream by applying the flatMapper/flatMapper2 function to each element and then performs the provided action for each element of the Stream. * The action is a TriConsumer functional interface which accepts an element of the Stream and two elements of the Iterable produced by the flatMapper/flatMapper2 function. * * @param The type of elements in the Iterable produced by the first flatMapper function * @param The type of elements in the Iterable produced by the second flatMapper function * @param The type of exception that can be thrown by the first flatMapper function * @param The type of exception that can be thrown by the second flatMapper function * @param The type of exception that can be thrown by the TriConsumer action * @param flatMapper The first function to transform elements of the Stream into Iterables * @param flatMapper2 The second function to transform elements of the first Iterable into further Iterables * @param action The action to be performed for each element of the Stream and each element of the Iterables produced by the flatMapper functions * @throws E Exception thrown by the first flatMapper function * @throws E2 Exception thrown by the second flatMapper function * @throws E3 Exception thrown by the TriConsumer action * @see N#forEach(Iterable, Throwables.Function, Throwables.Function, Throwables.TriConsumer) */ @ParallelSupported @TerminalOp public abstract void forEach( Throwables.Function, E> flatMapper, Throwables.Function, E2> flatMapper2, Throwables.TriConsumer action) throws E, E2, E3; /** * Performs the provided action for each pair of elements in the Stream. * For the last non-paired element, the action is performed with the last element and {@code null}. * * @param The type of exception that can be thrown by the BiConsumer action * @param action The action to be performed for each pair of elements * @throws E Exception thrown by the BiConsumer action * @see N#forEachPair(Iterable, Throwables.BiConsumer) */ @ParallelSupported @TerminalOp public abstract void forEachPair(final Throwables.BiConsumer action) throws E; /** * Performs the provided action for each pair of elements in the Stream, where the action is an instance of Throwables.BiConsumer. * For the last non-paired element, the action is performed with the last element and {@code null}. * The pairs are formed by taking elements at an interval specified by the increment parameter. 
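     * For example, a possible usage (illustrative; the expected pairing follows from the description above):
     * {@code Stream.of(1, 2, 3, 4, 5).forEachPair(2, (a, b) -> System.out.println(a + ", " + b)) // pairs: (1, 2), (3, 4), (5, null)}
     *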
* * @param The type of exception that can be thrown by the BiConsumer action * @param increment The interval at which pairs of elements are formed * @param action The action to be performed for each pair of elements * @throws E Exception thrown by the BiConsumer action * @see N#forEachPair(Iterable, int, Throwables.BiConsumer) */ @ParallelSupported @TerminalOp public abstract void forEachPair(int increment, Throwables.BiConsumer action) throws E; /** * Performs the provided action for each triple of elements in the Stream. * For the last two non-tripled elements, the action is performed with the last two elements and {@code null}. * * @param The type of exception that can be thrown by the TriConsumer action * @param action The action to be performed for each triple of elements * @throws E Exception thrown by the TriConsumer action * @see N#forEachTriple(Iterable, Throwables.TriConsumer) */ @ParallelSupported @TerminalOp public abstract void forEachTriple(Throwables.TriConsumer action) throws E; /** * Performs the provided action for each triple of elements in the Stream. * For the last two non-tripled elements, the action is performed with the last two elements and {@code null}. * The triples are formed by taking elements at an interval specified by the increment parameter. * * @param The type of exception that can be thrown by the TriConsumer action * @param increment The interval at which triples of elements are formed * @param action The action to be performed for each triple of elements * @throws E Exception thrown by the TriConsumer action * @see N#forEachTriple(Iterable, int, Throwables.TriConsumer) */ @ParallelSupported @TerminalOp public abstract void forEachTriple(final int increment, final Throwables.TriConsumer action) throws E; /** * Checks if any elements of this Stream match the provided predicate. * This is a terminal operation. * * @param the type of exception that may be thrown by the predicate * @param predicate the predicate to apply to elements of this stream * @return {@code true} if any elements match the predicate, otherwise false * @throws E if the predicate throws an exception */ @ParallelSupported @TerminalOp public abstract boolean anyMatch(Throwables.Predicate predicate) throws E; /** * Checks if all elements of this Stream match the provided predicate. * This is a terminal operation. * * @param the type of exception that may be thrown by the predicate * @param predicate the predicate to apply to elements of this stream * @return {@code true} if all elements match the predicate or this Stream is empty, otherwise false * @throws E if the predicate throws an exception */ @ParallelSupported @TerminalOp public abstract boolean allMatch(Throwables.Predicate predicate) throws E; /** * Checks if no elements of this Stream match the provided predicate. * This is a terminal operation. * * @param the type of exception that may be thrown by the predicate * @param predicate the predicate to apply to elements of this stream * @return {@code true} if no elements match the predicate or this Stream is empty, otherwise false * @throws E if the predicate throws an exception */ @ParallelSupported @TerminalOp public abstract boolean noneMatch(Throwables.Predicate predicate) throws E; /** * Checks if the specified number of elements in the stream match the provided predicate. *
     * The operation stops as soon as the number of elements matching the predicate exceeds {@code atMost}.
     *
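     * For example (illustrative):
     * {@code Stream.of(1, 2, 3, 4, 5).nMatch(2, 3, i -> i % 2 == 1) ==> true} (three elements match, which falls within [2, 3])
     *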
* @implNote it's equivalent to: {@code {@code atLeast} <= stream.filter(predicate).limit(atMost + 1).count() <= atMost} * * @param The type of exception that can be thrown by the predicate * @param {@code atLeast} The minimum number of elements that need to match the predicate * @param {@code atMost} The maximum number of elements that need to match the predicate * @param predicate The predicate to apply to elements in the stream * @return {@code true} if the number of elements matching the predicate is within the range [atLeast, atMost], {@code false} otherwise * @throws E Exception thrown by the predicate */ @ParallelSupported @TerminalOp public abstract boolean nMatch(long atLeast, long atMost, Throwables.Predicate predicate) throws E; /** * Returns the first element in the stream that matches the provided predicate. * If no element matches, an empty Optional is returned. * * @param the type of exception that the predicate may throw * @param predicate the predicate to apply to elements in the stream * @return an Optional containing the first matching element, or an empty Optional if no match is found * @throws E if the predicate throws an exception * @see #findAny(Throwables.Predicate) */ @ParallelSupported @TerminalOp public abstract Optional findFirst(Throwables.Predicate predicate) throws E; /** * Returns any element matched by the provided predicate if found. Otherwise, an empty Optional is returned. * This method is similar to findFirst but may have better performance in parallel streams. * * @param the type of exception that the predicate may throw * @param predicate the predicate to apply to elements in the stream * @return an Optional containing any matching element, or an empty Optional if no match is found * @throws E if the predicate throws an exception * @see #findFirst(Throwables.Predicate) */ @ParallelSupported @TerminalOp public abstract Optional findAny(Throwables.Predicate predicate) throws E; /** * Returns the last element matched by the provided predicate if found. Otherwise, an empty Optional is returned. * Consider using: {@code stream.reversed().findFirst(predicate)} for better performance when possible. * * @param the type of exception that the predicate may throw * @param predicate the predicate to apply to elements in the stream * @return an Optional containing the last matching element, or an empty Optional if no match is found * @throws E if the predicate throws an exception * @see #reversed() * @see #findFirst(Throwables.Predicate) */ @Beta @ParallelSupported @TerminalOp public abstract Optional findLast(Throwables.Predicate predicate) throws E; // /** // * Returns the first element matched by {@code predicateForFirst} if found or the first element if this stream is not empty // * Otherwise an empty {@code Optional} will be returned. // * // * @param // * @param predicateForFirst // * @return // * @throws E // */ // @ParallelSupported // @TerminalOp // public abstract Optional findFirstOrElseAny(Throwables.Predicate predicateForFirst) throws E; // // /** // * Returns the first element matched by {@code predicateForFirst} if found or the last element if this stream is not empty // * Otherwise an empty {@code Optional} will be returned. 
// * // * @param // * @param predicateForFirst // * @return // * @throws E // */ // @ParallelSupported // @TerminalOp // public abstract Optional findFirstOrElseLast(Throwables.Predicate predicateForFirst) throws E; // // /** // * Returns the first element matched by {@code predicateForFirst} if found or any element matched by {@code predicateForAny} (If this is a sequential stream, it will always be the first element matched by {@code predicateForAny}). // * Otherwise an empty {@code Optional} will be returned. // * // * @param // * @param // * @param predicateForFirst // * @param predicateForAny // * @return // * @throws E // * @throws E2 // */ // @ParallelSupported // @TerminalOp // public abstract Optional findFirstOrAny(Throwables.Predicate predicateForFirst, // Throwables.Predicate predicateForAny) throws E, E2; // // /** // * // * // * @param // * @param // * @param predicateForFirst // * @param predicateForLast // * @return // * @throws E // * @throws E2 // */ // @Beta // @SequentialOnly // @TerminalOp // public abstract Optional findFirstOrLast(Throwables.Predicate predicateForFirst, // Throwables.Predicate predicateForLast) throws E, E2; // // /** // *
// * This method only runs sequentially, even in parallel stream. // * // * @param // * @param // * @param // * @param init // * @param predicateForFirst // * @param predicateForLast // * @return // * @throws E // * @throws E2 // */ // @Beta // @SequentialOnly // @TerminalOp // public abstract Optional findFirstOrLast(final U init, // final Throwables.BiPredicate predicateForFirst, final Throwables.BiPredicate predicateForLast) // throws E, E2; // // /** // *
// * This method only runs sequentially, even in parallel stream. // * // * @param // * @param // * @param // * @param preFunc // * @param predicateForFirst // * @param predicateForLast // * @return // * @throws E // * @throws E2 // */ // @Beta // @SequentialOnly // @TerminalOp // public abstract Optional findFirstOrLast(final Function preFunc, // final Throwables.BiPredicate predicateForFirst, final Throwables.BiPredicate predicateForLast) // throws E, E2; /** * Checks if the stream contains all the specified elements. * * @param a the elements to check for presence in the stream * @return {@code true} if the stream contains all the specified elements or the specified array is empty, {@code false} otherwise */ @SuppressWarnings("unchecked") @SequentialOnly @TerminalOp public abstract boolean containsAll(T... a); /** * Checks if the stream contains all the specified elements. * * @param c the elements to check for presence in the stream * @return {@code true} if the stream contains all the specified elements or the specified collection is empty, {@code false} otherwise */ @SequentialOnly @TerminalOp public abstract boolean containsAll(Collection c); /** * Checks if the stream contains any of the specified elements. * * @param a the elements to check for presence in the stream * @return {@code true} if the stream contains any of the specified elements, {@code false} otherwise if doesn't or if the {@code Collection} is {@code null} or empty * @see #containsNone(Object[]) */ @SuppressWarnings("unchecked") @SequentialOnly @TerminalOp public abstract boolean containsAny(T... a); /** * Checks if the stream contains any of the specified elements. * * @param c the elements to check for presence in the stream * @return {@code true} if the stream contains any of the specified element, {@code false} otherwise if doesn't or if the {@code Collection} is {@code null} or empty * @see #containsNone(Collection) */ @SequentialOnly @TerminalOp public abstract boolean containsAny(Collection c); /** * Checks if the stream doesn't contain any of the specified elements. * * @param a the elements to check for presence in the stream * @return {@code true} if the stream doesn't contain any of the specified elements, or if this stream is empty, or if {@code valuesToFind} is {@code null} or empty, {@code false} otherwise * @see #containsAny(Object[]) */ @SuppressWarnings("unchecked") @SequentialOnly @TerminalOp public abstract boolean containsNone(T... a); /** * Checks if the stream doesn't contain any of the specified elements. * * @param c the elements to check for presence in the stream * @return {@code true} if the stream doesn't contain any of the specified elements, or if this stream is empty, or if {@code valuesToFind} is {@code null} or empty. {@code false} otherwise * @see #containsAny(Collection) */ @SequentialOnly @TerminalOp public abstract boolean containsNone(Collection c); /** * Returns an array containing the elements of this stream. * The provided generator function is used to allocate the returned array. * * @param the component type of the array * @param generator a function which produces a new array of the desired type and the provided length * @return an array containing the elements of this stream */ @SequentialOnly @TerminalOp public abstract A[] toArray(IntFunction generator); /** * Collects the elements of this stream into an immutable map. * The provided keyMapper and valueMapper functions are used to produce the keys and values for the map. 
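     * For example, a possible usage (illustrative; the printed form of the map may vary):
     * {@code Stream.of("a", "bb", "ccc").toImmutableMap(s -> s, String::length) ==> {a=1, bb=2, ccc=3}}
     *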
* * @param the type of keys in the resulting map * @param the type of values in the resulting map * @param the type of exception that the keyMapper function may throw * @param the type of exception that the valueMapper function may throw * @param keyMapper a function to produce the keys for the map * @param valueMapper a function to produce the values for the map * @return an immutable map containing the elements of this stream * @throws IllegalStateException if there are duplicated keys * @throws E if the keyMapper function throws an exception * @throws E2 if the valueMapper function throws an exception * @see #toImmutableMap(Throwables.Function, Throwables.Function, BinaryOperator) * @see #toMap(Throwables.Function, Throwables.Function) * @see #toMap(Throwables.Function, Throwables.Function, BinaryOperator) * @see Fn#throwingMerger() * @see Fn#replacingMerger() * @see Fn#ignoringMerger() */ @ParallelSupported @TerminalOp public ImmutableMap toImmutableMap(final Throwables.Function keyMapper, final Throwables.Function valueMapper) throws IllegalStateException, E, E2 { return ImmutableMap.wrap(toMap(keyMapper, valueMapper)); } /** * Collects the elements of this stream into an immutable map. * The provided keyMapper and valueMapper functions are used to produce the keys and values for the map. * The provided mergeFunction is used to merge values associated with the same key. * * @param the type of keys in the resulting map * @param the type of values in the resulting map * @param the type of exception that the keyMapper function may throw * @param the type of exception that the valueMapper function may throw * @param keyMapper a function to produce the keys for the map * @param valueMapper a function to produce the values for the map * @param mergeFunction the function to merge values associated with the same key * @return an immutable map containing the elements of this stream * @throws E if the keyMapper function throws an exception * @throws E2 if the valueMapper function throws an exception * @see #toImmutableMap(Throwables.Function, Throwables.Function) * @see #toMap(Throwables.Function, Throwables.Function, BinaryOperator) * @see Fn#throwingMerger() * @see Fn#replacingMerger() * @see Fn#ignoringMerger() */ @ParallelSupported @TerminalOp public ImmutableMap toImmutableMap(final Throwables.Function keyMapper, final Throwables.Function valueMapper, final BinaryOperator mergeFunction) throws E, E2 { return ImmutableMap.wrap(toMap(keyMapper, valueMapper, mergeFunction)); } /** * Collects the elements of this stream into a map. * The provided keyMapper and valueMapper functions are used to produce the keys and values for the map. 
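     *
     * <p>Usage sketch (illustrative only; duplicated keys would trigger the {@code IllegalStateException} described below):</p>
     * <pre>{@code
     * Map<Integer, String> wordByLength = Stream.of("a", "bb", "ccc")
     *         .toMap(String::length, w -> w); // {1=a, 2=bb, 3=ccc}
     * }</pre>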
* * @param the type of keys in the resulting map * @param the type of values in the resulting map * @param the type of exception that the keyMapper function may throw * @param the type of exception that the valueMapper function may throw * @param keyMapper a function to produce the keys for the map * @param valueMapper a function to produce the values for the map * @return a map containing the elements of this stream * @throws IllegalStateException if there are duplicated keys * @throws E if the keyMapper function throws an exception * @throws E2 if the valueMapper function throws an exception * @see #toMap(Throwables.Function, Throwables.Function, BinaryOperator) * @see Fn#throwingMerger() * @see Fn#replacingMerger() * @see Fn#ignoringMerger() */ @ParallelSupported @TerminalOp public abstract Map toMap(Throwables.Function keyMapper, Throwables.Function valueMapper) throws IllegalStateException, E, E2; /** * Collects the elements of this stream into a map. * The provided keyMapper and valueMapper functions are used to produce the keys and values for the map. * The mergeFunction is used to to merge values associated with the same key. * * @param the type of keys in the resulting map * @param the type of values in the resulting map * @param the type of exception that the keyMapper function may throw * @param the type of exception that the valueMapper function may throw * @param keyMapper a function to produce the keys for the map * @param valueMapper a function to produce the values for the map * @param mergeFunction the function to merge values associated with the same key * @return a map containing the elements of this stream * @throws E if the keyMapper function throws an exception * @throws E2 if the valueMapper function throws an exception * @see #toMap(Throwables.Function, Throwables.Function, BinaryOperator, Supplier) * @see Fn#throwingMerger() * @see Fn#replacingMerger() * @see Fn#ignoringMerger() */ @ParallelSupported @TerminalOp public abstract Map toMap(Throwables.Function keyMapper, Throwables.Function valueMapper, BinaryOperator mergeFunction) throws E, E2; /** * Collects the elements of this stream into a map. * The provided keyMapper and valueMapper functions are used to produce the keys and values for the map. * The mapFactory is used to create the resulting map. * * @param the type of keys in the resulting map * @param the type of values in the resulting map * @param the type of the resulting map * @param the type of exception that the keyMapper function may throw * @param the type of exception that the valueMapper function may throw * @param keyMapper a function to produce the keys for the map * @param valueMapper a function to produce the values for the map * @param mapFactory a supplier to create the resulting map * @return a map containing the elements of this stream * @throws IllegalStateException if there are duplicated keys * @throws E if the keyMapper function throws an exception * @throws E2 if the valueMapper function throws an exception * @see #toMap(Throwables.Function, Throwables.Function, BinaryOperator, Supplier) * @see Fn#throwingMerger() * @see Fn#replacingMerger() * @see Fn#ignoringMerger() */ @ParallelSupported @TerminalOp public abstract , E extends Exception, E2 extends Exception> M toMap(Throwables.Function keyMapper, Throwables.Function valueMapper, Supplier mapFactory) throws IllegalStateException, E, E2; /** * Collects the elements of this stream into a map. 
* The provided keyMapper and valueMapper functions are used to produce the keys and values for the map. * The mergeFunction is used to merge values associated with the same key. * The mapFactory is used to create the resulting map. * * @param the type of keys in the resulting map * @param the type of values in the resulting map * @param the type of the resulting map * @param the type of exception that the keyMapper function may throw * @param the type of exception that the valueMapper function may throw * @param keyMapper a function to produce the keys for the map * @param valueMapper a function to produce the values for the map * @param mergeFunction the function to merge values associated with the same key * @param mapFactory a supplier to create the resulting map * @return a map containing the elements of this stream * @throws E if the keyMapper function throws an exception * @throws E2 if the valueMapper function throws an exception * @see Fn#throwingMerger() * @see Fn#replacingMerger() * @see Fn#ignoringMerger() */ @ParallelSupported @TerminalOp public abstract , E extends Exception, E2 extends Exception> M toMap(Throwables.Function keyMapper, Throwables.Function valueMapper, BinaryOperator mergeFunction, Supplier mapFactory) throws E, E2; // /** // * // * @param keyMapper // * @param downstream // * @return // * @see #groupTo(Throwables.Function, Collector) // * @deprecated replaced by {@code groupTo} // */ // @Deprecated // @ParallelSupported // @TerminalOp // public final Map toMap(Throwables.Function keyMapper, // final Collector downstream) throws E { // return groupTo(keyMapper, downstream); // } // // /** // * // * @param keyMapper // * @param downstream // * @param mapFactory // * @return // * @see #groupTo(Throwables.Function, Collector, Supplier) // * @deprecated replaced by {@code groupTo} // */ // @Deprecated // @ParallelSupported // @TerminalOp // public final , E extends Exception> M toMap(Throwables.Function keyMapper, // final Collector downstream, final Supplier mapFactory) throws E { // return groupTo(keyMapper, downstream, mapFactory); // } // // /** // * // * @param keyMapper // * @param valueMapper // * @param downstream // * @return // * @see #groupTo(Throwables.Function, Throwables.Function, Collector) // * @deprecated replaced by {@code groupTo} // */ // @Deprecated // @ParallelSupported // @TerminalOp // public final Map toMap(Throwables.Function keyMapper, // Throwables.Function valueMapper, final Collector downstream) throws E, E2 { // return groupTo(keyMapper, valueMapper, downstream); // } // // /** // * // * @param keyMapper // * @param valueMapper // * @param downstream // * @param mapFactory // * @return // * @see #groupTo(Throwables.Function, Throwables.Function, Collector, Supplier) // * @deprecated replaced by {@code groupTo} // */ // @Deprecated // @ParallelSupported // @TerminalOp // public final , E extends Exception, E2 extends Exception> M toMap(Throwables.Function keyMapper, // Throwables.Function valueMapper, final Collector downstream, final Supplier mapFactory) // throws E, E2 { // return groupTo(keyMapper, valueMapper, downstream, mapFactory); // } /** * Groups the elements of this stream by a key produced by the provided keyMapper function. * The resulting map will contain keys produced by the keyMapper function and values as lists of elements that map to the same key. 
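     *
     * <p>Usage sketch (illustrative only):</p>
     * <pre>{@code
     * Map<Integer, List<String>> byLength = Stream.of("a", "b", "cc")
     *         .groupTo(String::length); // {1=[a, b], 2=[cc]}
     * }</pre>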
* * @param the type of keys in the resulting map * @param the type of exception that the keyMapper function may throw * @param keyMapper a function to produce the keys for the map * @return a map containing the elements of this stream grouped by the keys produced by the keyMapper function * @throws E if the keyMapper function throws an exception * @see #toMultimap(Throwables.Function) */ @ParallelSupported @TerminalOp public abstract Map> groupTo(Throwables.Function keyMapper) throws E; /** * Groups the elements of this stream by a key produced by the provided keyMapper function. * The resulting map will contain keys produced by the keyMapper function and values as lists of elements that map to the same key. * The provided mapFactory is used to create the resulting map. * * @param the type of keys in the resulting map * @param the type of the resulting map * @param the type of exception that the keyMapper function may throw * @param keyMapper a function to produce the keys for the map * @param mapFactory a supplier to create the resulting map * @return a map containing the elements of this stream grouped by the keys produced by the keyMapper function * @throws E if the keyMapper function throws an exception * @see #toMultimap(Throwables.Function, Supplier) */ @ParallelSupported @TerminalOp public abstract >, E extends Exception> M groupTo(Throwables.Function keyMapper, final Supplier mapFactory) throws E; /** * Groups the elements of this stream by a key produced by the provided keyMapper function. * The resulting map will contain keys produced by the keyMapper function and values as lists of elements that map to the same key. * The provided valueMapper function is used to produce the values for the map. * * @param the type of keys in the resulting map * @param the type of values in the resulting map * @param the type of exception that the keyMapper function may throw * @param the type of exception that the valueMapper function may throw * @param keyMapper a function to produce the keys for the map * @param valueMapper a function to produce the values for the map * @return a map containing the elements of this stream grouped by the keys produced by the keyMapper function * @throws E if the keyMapper function throws an exception * @throws E2 if the valueMapper function throws an exception * @see #toMultimap(Throwables.Function, Throwables.Function) */ @ParallelSupported @TerminalOp public abstract Map> groupTo(Throwables.Function keyMapper, Throwables.Function valueMapper) throws E, E2; /** * Groups the elements of this stream by a key produced by the provided keyMapper function. * The resulting map will contain keys produced by the keyMapper function and values as lists of elements that map to the same key. * The provided valueMapper function is used to produce the values for the map. * The provided mapFactory is used to create the resulting map. 
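     *
     * <p>Usage sketch (illustrative only; {@code TreeMap::new} is just one possible map factory):</p>
     * <pre>{@code
     * TreeMap<Integer, List<Character>> initialByLength = Stream.of("apple", "ant", "bee")
     *         .groupTo(String::length, w -> w.charAt(0), TreeMap::new); // {3=[a, b], 5=[a]}
     * }</pre>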
* * @param the type of keys in the resulting map * @param the type of values in the resulting map * @param the type of the resulting map * @param the type of exception that the keyMapper function may throw * @param the type of exception that the valueMapper function may throw * @param keyMapper a function to produce the keys for the map * @param valueMapper a function to produce the values for the map * @param mapFactory a supplier to create the resulting map * @return a map containing the elements of this stream grouped by the keys produced by the keyMapper function * @throws E if the keyMapper function throws an exception * @throws E2 if the valueMapper function throws an exception * @see #toMultimap(Throwables.Function, Throwables.Function, Supplier) */ @ParallelSupported @TerminalOp public abstract >, E extends Exception, E2 extends Exception> M groupTo( Throwables.Function keyMapper, Throwables.Function valueMapper, Supplier mapFactory) throws E, E2; /** * Groups the elements of this stream by a key produced by the provided keyMapper function. * The resulting map will contain keys produced by the keyMapper function and values as collections * of elements that map to the same key, reduced using the provided downstream collector. * * @param the type of keys in the resulting map * @param the type of the result of the downstream reduction * @param the type of exception that the keyMapper function may throw * @param keyMapper a function to produce the keys for the map * @param downstream a collector to reduce the values associated with a key * @return a map containing the elements of this stream grouped by the keys produced by the keyMapper function * @throws E if the keyMapper function throws an exception * @see Collectors#groupingBy(Function, Collector) */ @ParallelSupported @TerminalOp public abstract Map groupTo(Throwables.Function keyMapper, final Collector downstream) throws E; /** * Groups the elements of this stream by a key produced by the provided keyMapper function. * The resulting map will contain keys produced by the keyMapper function and values as collections * of elements that map to the same key, reduced using the provided downstream collector. * The provided mapFactory is used to create the resulting map. * * @param the type of keys in the resulting map * @param the type of the result of the downstream reduction * @param the type of the resulting map * @param the type of exception that the keyMapper function may throw * @param keyMapper a function to produce the keys for the map * @param downstream a collector to reduce the values associated with a key * @param mapFactory a supplier to create the resulting map * @return a map containing the elements of this stream grouped by the keys produced by the keyMapper function * @throws E if the keyMapper function throws an exception * @see Collectors#groupingBy(Function, Collector, Supplier) */ @ParallelSupported @TerminalOp public abstract , E extends Exception> M groupTo(Throwables.Function keyMapper, final Collector downstream, final Supplier mapFactory) throws E; /** * Groups the elements of this stream by a key produced by the provided keyMapper function. * The resulting map will contain keys produced by the keyMapper function and values as collections * of elements that map to the same key, reduced using the provided downstream collector. 
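     *
     * <p>Usage sketch (illustrative only; the downstream collector here is the JDK {@code java.util.stream.Collectors.joining}):</p>
     * <pre>{@code
     * Map<Integer, String> joinedByLength = Stream.of("a", "b", "cc")
     *         .groupTo(String::length, w -> w, java.util.stream.Collectors.joining(", ")); // {1="a, b", 2="cc"}
     * }</pre>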
* * @param the type of keys in the resulting map * @param the type of values in the resulting map * @param the type of the result of the downstream reduction * @param the type of exception that the keyMapper function may throw * @param the type of exception that the valueMapper function may throw * @param keyMapper a function to produce the keys for the map * @param valueMapper a function to produce the values for the map * @param downstream a collector to reduce the values associated with a key * @return a map containing the elements of this stream grouped by the keys produced by the keyMapper function * @throws E if the keyMapper function throws an exception * @throws E2 if the valueMapper function throws an exception * @see Collectors#groupingBy(Function, Collector) */ @ParallelSupported @TerminalOp public abstract Map groupTo(Throwables.Function keyMapper, Throwables.Function valueMapper, final Collector downstream) throws E, E2; /** * Groups the elements of this stream by a key produced by the provided keyMapper function. * The resulting map will contain keys produced by the keyMapper function and values as collections * of elements that map to the same key, reduced using the provided downstream collector. * The provided mapFactory is used to create the resulting map. * * @param the type of keys in the resulting map * @param the type of values in the resulting map * @param the type of the result of the downstream reduction * @param the type of the resulting map * @param the type of exception that the keyMapper function may throw * @param the type of exception that the valueMapper function may throw * @param keyMapper a function to produce the keys for the map * @param valueMapper a function to produce the values for the map * @param downstream a collector to reduce the values associated with a key * @param mapFactory a supplier to create the resulting map * @return a map containing the elements of this stream grouped by the keys produced by the keyMapper function * @throws E if the keyMapper function throws an exception * @throws E2 if the valueMapper function throws an exception * @see Collectors#groupingBy(Function, Collector, Supplier) */ @ParallelSupported @TerminalOp public abstract , E extends Exception, E2 extends Exception> M groupTo( Throwables.Function keyMapper, Throwables.Function valueMapper, final Collector downstream, final Supplier mapFactory) throws E, E2; // /** // * // * @param // * @param // * @param flatKeyExtractor // * @return // * @throws E // */ // @ParallelSupported // @TerminalOp // public abstract Map> flatGroupTo(Throwables.Function, E> flatKeyExtractor) // throws E; // // /** // * // * @param // * @param // * @param // * @param flatKeyExtractor // * @param mapFactory // * @return // * @throws E // */ // @ParallelSupported // @TerminalOp // public abstract >, E extends Exception> M flatGroupTo( // final Throwables.Function, E> flatKeyExtractor, final Supplier mapFactory) throws E; // // /** // * // * @param // * @param // * @param // * @param // * @param flatKeyExtractor // * @param valueMapper // * @return // * @throws E // * @throws E2 // */ // @ParallelSupported // @TerminalOp // public abstract Map> flatGroupTo( // Throwables.Function, E> flatKeyExtractor, // Throwables.BiFunction valueMapper) throws E, E2; // // /** // * // * @param // * @param // * @param // * @param // * @param // * @param flatKeyExtractor // * @param valueMapper // * @param mapFactory // * @return // * @throws E // * @throws E2 // */ // @ParallelSupported // @TerminalOp // public 
abstract >, E extends Exception, E2 extends Exception> M flatGroupTo( // Throwables.Function, E> flatKeyExtractor, // Throwables.BiFunction valueMapper, Supplier mapFactory) throws E, E2; /** * Groups the elements of the Stream into a Map according to a function that maps each element to multiple keys. * The keys are produced by applying a provided mapping function to each input element. * The values are Lists containing all input elements that map to the respective key. * The function can potentially map an element to multiple keys (hence flat in the method name). * * @param The type of the keys in the resulting Map * @param The type of the exception that can be thrown by the key mapping function * @param flatKeyExtractor A function that maps an input element to a collection of keys * @return A Map where each key is associated with a List of elements that were mapped to it * @throws E Exception thrown by the key mapping function */ @ParallelSupported @TerminalOp public abstract Map> flatGroupTo(Throwables.Function, E> flatKeyExtractor) throws E; /** * Groups the elements of the Stream into a Map according to a function that maps each element to multiple keys. * The keys are produced by applying a provided mapping function to each input element. * The values are Lists containing all input elements that map to the respective key. * The function can potentially map an element to multiple keys (hence flat in the method name). * The provided mapFactory is used to create the resulting map. * * @param The type of the keys in the resulting Map * @param The type of the resulting Map * @param The type of the exception that can be thrown by the key mapping function * @param flatKeyExtractor A function that maps an input element to a collection of keys * @param mapFactory a supplier to create the resulting map * @return A Map where each key is associated with a List of elements that were mapped to it * @throws E Exception thrown by the key mapping function */ @ParallelSupported @TerminalOp public abstract >, E extends Exception> M flatGroupTo( final Throwables.Function, E> flatKeyExtractor, final Supplier mapFactory) throws E; /** * Groups the elements of the Stream into a Map according to a function that maps each element to multiple keys. * The keys are produced by applying a provided key mapping function to each input element. * The values are Lists containing all values (produced by provided value mapping function) that map to the respective key. * The function can potentially map an element to multiple keys (hence flat in the method name). * Additionally, a value mapping function is applied to each element and its corresponding key to produce the values in the resulting Map. 
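     *
     * <p>Usage sketch (illustrative only; the value mapping below ignores its arguments, so it does not depend on their order):</p>
     * <pre>{@code
     * // "ab" contributes the keys [a, b]; "bc" contributes [b, c]
     * Map<String, List<Integer>> hitsByLetter = Stream.of("ab", "bc")
     *         .flatGroupTo(w -> Arrays.asList(w.split("")), (k, w) -> 1); // {a=[1], b=[1, 1], c=[1]}
     * }</pre>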
* * @param The type of the keys in the resulting Map * @param The type of the values in the resulting Map * @param The type of the exception that can be thrown by the key mapping function * @param The type of the exception that can be thrown by the value mapping function * @param flatKeyExtractor A function that maps an input element to a collection of keys * @param valueMapper A function that maps an input element and its corresponding key to a value * @return A Map where each key is associated with a List of values that were mapped to it * @throws E Exception thrown by the key mapping function * @throws E2 Exception thrown by the value mapping function */ @ParallelSupported @TerminalOp public abstract Map> flatGroupTo( Throwables.Function, E> flatKeyExtractor, Throwables.BiFunction valueMapper) throws E, E2; /** * Groups the elements of the Stream into a Map according to a function that maps each element to multiple keys. * The keys are produced by applying a provided key mapping function to each input element. * The values are Lists containing all values (produced by provided value mapping function) that map to the respective key. * The function can potentially map an element to multiple keys (hence flat in the method name). * Additionally, a value mapping function is applied to each element and its corresponding key to produce the values in the resulting Map. * The resulting Map is created by a provided supplier function. * * @param The type of the keys in the resulting Map * @param The type of the values in the resulting Map * @param The type of the resulting Map * @param The type of the exception that can be thrown by the key mapping function * @param The type of the exception that can be thrown by the value mapping function * @param flatKeyExtractor A function that maps an input element to a collection of keys * @param valueMapper A function that maps an input element and its corresponding key to a value * @param mapFactory a supplier to create the resulting map * @return A Map where each key is associated with a List of values that were mapped to it * @throws E Exception thrown by the key mapping function * @throws E2 Exception thrown by the value mapping function */ @ParallelSupported @TerminalOp public abstract >, E extends Exception, E2 extends Exception> M flatGroupTo( Throwables.Function, E> flatKeyExtractor, Throwables.BiFunction valueMapper, Supplier mapFactory) throws E, E2; // /** // * // * @param // * @param // * @param // * @param // * @param flatKeyExtractor // * @param downstream // * @return // * @throws E // */ // @ParallelSupported // @TerminalOp // public abstract Map flatGroupTo(final Throwables.Function, E> flatKeyExtractor, // final Collector downstream) throws E; // // /** // * // * @param // * @param // * @param // * @param // * @param // * @param flatKeyExtractor // * @param downstream // * @param mapFactory // * @return // * @throws E // */ // @ParallelSupported // @TerminalOp // public abstract , E extends Exception> M flatGroupTo( // final Throwables.Function, E> flatKeyExtractor, final Collector downstream, // final Supplier mapFactory) throws E; // // /** // * // * @param // * @param // * @param // * @param // * @param // * @param // * @param flatKeyExtractor // * @param valueMapper // * @param downstream // * @return // * @throws E // * @throws E2 // */ // @ParallelSupported // @TerminalOp // public abstract Map flatGroupTo( // final Throwables.Function, E> flatKeyExtractor, // final Throwables.BiFunction valueMapper, final Collector downstream) throws 
E, E2; // // /** // * // * @param // * @param // * @param // * @param // * @param // * @param // * @param // * @param flatKeyExtractor // * @param valueMapper // * @param downstream // * @param mapFactory // * @return // * @throws E // * @throws E2 // */ // @ParallelSupported // @TerminalOp // public abstract , E extends Exception, E2 extends Exception> M flatGroupTo( // final Throwables.Function, E> flatKeyExtractor, // final Throwables.BiFunction valueMapper, final Collector downstream, // final Supplier mapFactory) throws E, E2; /** * Groups the elements of the Stream into a Map according to a function that maps each element to multiple keys. * The keys are produced by applying a provided key mapping function to each input element. * The values are Lists containing all input elements that map to the respective key. * The function can potentially map an element to multiple keys (hence flat in the method name). * The resulting Map is created by a provided supplier function. * * @param The type of the keys in the resulting Map * @param The type of the values in the resulting Map * @param The type of the exception that can be thrown by the key mapping function * @param flatKeyExtractor A function that maps an input element to a collection of keys * @param downstream A Collector that accumulates input elements into a mutable result container * @return A Map where each key is associated with a List of values that were mapped to it * @throws E Exception thrown by the key mapping function */ @ParallelSupported @TerminalOp public abstract Map flatGroupTo(Throwables.Function, E> flatKeyExtractor, Collector downstream) throws E; /** * Groups the elements of the Stream into a Map according to a function that maps each element to multiple keys. * The keys are produced by applying a provided key mapping function to each input element. * The values are Lists containing all input elements that map to the respective key. * The function can potentially map an element to multiple keys (hence flat in the method name). * The resulting Map is created by a provided supplier function. * * @param The type of the keys in the resulting Map * @param The type of the values in the resulting Map * @param The type of the resulting Map * @param The type of the exception that can be thrown by the key mapping function * @param flatKeyExtractor A function that maps an input element to a collection of keys * @param downstream A Collector that accumulates input elements into a mutable result container * @param mapFactory A function that creates a new Map * @return A Map where each key is associated with a List of values that were mapped to it * @throws E Exception thrown by the key mapping function */ @ParallelSupported @TerminalOp public abstract , E extends Exception> M flatGroupTo( Throwables.Function, E> flatKeyExtractor, Collector downstream, Supplier mapFactory) throws E; /** * Groups the elements of the Stream into a Map according to a function that maps each element to multiple keys. * The keys are produced by applying a provided key mapping function to each input element. * The values are computed by applying a provided value mapping function to each key and its associated elements. * The function can potentially map an element to multiple keys (hence flat in the method name). 
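     *
     * <p>Usage sketch (illustrative only; the downstream collector is the JDK {@code java.util.stream.Collectors.counting()}, which ignores the mapped values):</p>
     * <pre>{@code
     * Map<String, Long> letterCounts = Stream.of("ab", "bc")
     *         .flatGroupTo(w -> Arrays.asList(w.split("")),
     *                 (k, w) -> 1,
     *                 java.util.stream.Collectors.counting()); // {a=1, b=2, c=1}
     * }</pre>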
* * @param The type of the keys in the resulting Map * @param The type of the intermediate values used in the downstream collector * @param The type of the values in the resulting Map * @param The type of the exception that can be thrown by the key mapping function * @param The type of the exception that can be thrown by the value mapping function * @param flatKeyExtractor A function that maps an input element to a collection of keys * @param valueMapper A function that maps an input element and a key to an intermediate value * @param downstream A Collector that accumulates intermediate values into a final result * @return A Map where each key is associated with a value that were mapped to it * @throws E Exception thrown by the key mapping function * @throws E2 Exception thrown by the value mapping function */ @ParallelSupported @TerminalOp public abstract Map flatGroupTo( Throwables.Function, E> flatKeyExtractor, Throwables.BiFunction valueMapper, Collector downstream) throws E, E2; /** * Groups the elements of the Stream into a Map according to a function that maps each element to multiple keys. * The keys are produced by applying a provided key mapping function to each input element. * The values are computed by applying a provided value mapping function to each key and its associated elements. * The function can potentially map an element to multiple keys (hence flat in the method name). * The resulting Map is created by a provided collector function. * * @param The type of the keys in the resulting Map * @param The type of the intermediate values used in the downstream collector * @param The type of the values in the resulting Map * @param The type of the resulting Map * @param The type of the exception that can be thrown by the key mapping function * @param The type of the exception that can be thrown by the value mapping function * @param flatKeyExtractor A function that maps an input element to a collection of keys * @param valueMapper A function that maps an input element and a key to an intermediate value * @param downstream A Collector that accumulates intermediate values into a final result * @param mapFactory A Supplier that generates the resulting Map * @return A Map where each key is associated with a value that were mapped to it * @throws E Exception thrown by the key mapping function * @throws E2 Exception thrown by the value mapping function */ @ParallelSupported @TerminalOp public abstract , E extends Exception, E2 extends Exception> M flatGroupTo( Throwables.Function, E> flatKeyExtractor, Throwables.BiFunction valueMapper, final Collector downstream, Supplier mapFactory) throws E, E2; /** * Partitions the elements of the Stream into a Map according to a predicate. * The Map has a Boolean key, where true is associated with a list of elements that satisfy the predicate, * and false is associated with a list of elements that do not satisfy the predicate. * The returned {@code Map} always contains mappings for both {@code false} and {@code true} keys, even this stream is empty. 
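     *
     * <p>Usage sketch (illustrative only):</p>
     * <pre>{@code
     * Map<Boolean, List<Integer>> evenOdd = Stream.of(1, 2, 3, 4)
     *         .partitionTo(n -> n % 2 == 0); // {false=[1, 3], true=[2, 4]}
     * }</pre>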
* * @param The type of the exception that can be thrown by the predicate * @param predicate A function that tests whether an element should be included in the true or false list * @return A Map where the Boolean key is associated with a list of elements that satisfy or do not satisfy the predicate * @throws E Exception thrown by the predicate * @see #groupTo(Throwables.Function) * @see Collectors#partitioningBy(Predicate) */ @ParallelSupported @TerminalOp public abstract Map> partitionTo(final Throwables.Predicate predicate) throws E; /** * Partitions the elements of the Stream into a Map according to a predicate. * The Map has a Boolean key, where true is associated with a value that is the result of applying a downstream collector to the elements that satisfy the predicate, * and false is associated with a value that is the result of applying a downstream collector to the elements that do not satisfy the predicate. * The returned {@code Map} always contains mappings for both {@code false} and {@code true} keys, even this stream is empty. * * @param The result type of the downstream collector * @param The type of the exception that can be thrown by the predicate * @param predicate A function that tests whether an element should be included in the true or false group * @param downstream A Collector that accumulates elements into a final result * @return A Map where the Boolean key is associated with a value that is the result of applying a downstream collector to the elements that satisfy or do not satisfy the predicate * @throws E Exception thrown by the predicate * @see #groupTo(Throwables.Function, Collector) * @see Collectors#partitioningBy(Predicate, Collector) */ @ParallelSupported @TerminalOp public abstract Map partitionTo(Throwables.Predicate predicate, Collector downstream) throws E; /** * Converts the elements in this stream into a ListMultimap based on the provided key extractor function. * The keys in the resulting multimap are generated by applying the key extractor function to the elements. * * @param the type of keys in the resulting multimap * @param the type of exception that may be thrown by the key extractor function * @param keyMapper the function to extract keys from the elements * @return a ListMultimap where the keys are generated by the key extractor function and the values are the elements * @throws E if an exception occurs during key extraction * @see #groupTo(Throwables.Function) */ @ParallelSupported @TerminalOp public abstract ListMultimap toMultimap(Throwables.Function keyMapper) throws E; /** * Converts the elements in this stream into a Multimap based on the provided key extractor function. * The keys in the resulting multimap are generated by applying the key extractor function to the elements. * The provided mapFactory is used to create the resulting Multimap. 
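     *
     * <p>Usage sketch (illustrative only; {@code N.newListMultimap()} is assumed to be one of this library's multimap factories, and any {@code ListMultimap} supplier would do):</p>
     * <pre>{@code
     * ListMultimap<Integer, String> byLength = Stream.of("a", "b", "cc")
     *         .toMultimap(String::length, N::newListMultimap); // 1 -> [a, b], 2 -> [cc]
     * }</pre>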
* * @param the type of keys in the resulting multimap * @param the type of collection in the resulting multimap * @param the type of multimap to be returned * @param the type of exception that may be thrown by the key extractor function * @param keyMapper the function to extract keys from the elements * @param mapFactory the supplier to create a new multimap instance * @return a Multimap where the keys are generated by the key extractor function and the values are the elements * @throws E if an exception occurs during key extraction * @see #groupTo(Throwables.Function, Supplier) */ @ParallelSupported @TerminalOp public abstract , M extends Multimap, E extends Exception> M toMultimap( Throwables.Function keyMapper, Supplier mapFactory) throws E; /** * Converts the elements in this stream into a ListMultimap based on the provided key and value extractor functions. * The keys in the resulting ListMultimap are generated by applying the key extractor function to the elements, * and the values are generated by applying the value extractor function to the elements. * * @param the type of keys in the resulting ListMultimap * @param the type of values in the resulting ListMultimap * @param the type of exception that may be thrown by the key extractor function * @param the type of exception that may be thrown by the value extractor function * @param keyMapper the function to extract keys from the elements * @param valueMapper the function to extract values from the elements * @return a ListMultimap where the keys are generated by the key extractor function and the values are generated by the value extractor function * @throws E if an exception occurs during key extraction * @throws E2 if an exception occurs during value extraction * @see #groupTo(Throwables.Function, Throwables.Function) */ @ParallelSupported @TerminalOp public abstract ListMultimap toMultimap(Throwables.Function keyMapper, Throwables.Function valueMapper) throws E, E2; /** * Converts the elements in this stream into a Multimap based on the provided key and value extractor functions. * The keys in the resulting Multimap are generated by applying the key extractor function to the elements, * and the values are generated by applying the value extractor function to the elements. * The provided mapFactory is used to create the resulting Multimap. * * @param the type of keys in the resulting Multimap * @param the type of values in the resulting Multimap * @param the type of collection in the resulting Multimap * @param the type of Multimap to be returned * @param the type of exception that may be thrown by the key extractor function * @param the type of exception that may be thrown by the value extractor function * @param keyMapper the function to extract keys from the elements * @param valueMapper the function to extract values from the elements * @param mapFactory the supplier to create the Multimap instance * @return a Multimap where the keys are generated by the key extractor function and the values are generated by the value extractor function * @throws E if an exception occurs during key extraction * @throws E2 if an exception occurs during value extraction * @see #groupTo(Throwables.Function, Throwables.Function, Supplier) */ @ParallelSupported @TerminalOp public abstract , M extends Multimap, E extends Exception, E2 extends Exception> M toMultimap( Throwables.Function keyMapper, Throwables.Function valueMapper, Supplier mapFactory) throws E, E2; /** * Converts the elements of the Stream into a DataSet. 
     * Element type {@code T} must be a Map or Bean for retrieving column names.
     *
     * @return A DataSet representation of the Stream elements.
     * @throws IllegalArgumentException if element type {@code T} is not a Map or Bean.
     * @see N#newDataSet(Collection)
     */
    @Beta
    @SequentialOnly
    @TerminalOp
    public abstract DataSet toDataSet() throws IllegalArgumentException;

    /**
     * Converts the elements of the Stream into a DataSet.
     *
     * @param columnNames The list of column names to be used in the DataSet.
     * @return A DataSet representation of the Stream elements.
     * @throws IllegalArgumentException if the specified {@code columnNames} is empty and element type {@code T} is not a Map or Bean.
     * @see N#newDataSet(Collection, Collection)
     */
    @SequentialOnly
    @TerminalOp
    public abstract DataSet toDataSet(final List<String> columnNames) throws IllegalArgumentException;

    /**
     * Performs a reduction on the elements of this stream, using the provided BinaryOperator, and returns an Optional.
     * The BinaryOperator should be an associative accumulation function, and it is applied from left to right to the elements in this stream.
     * This is a terminal operation.
     *
     *
     * This method will always run sequentially, even in parallel stream.
     *
     * @param accumulator the function for combining two values
     * @return an Optional describing the result of the fold
     * @see #reduce(BinaryOperator)
     */
    @SequentialOnly
    @TerminalOp
    public abstract Optional<T> foldLeft(BinaryOperator<T> accumulator);

    /**
     * Performs a reduction on the elements of this stream, using the provided accumulator function, and returns the reduced value.
     * The accumulator function takes two parameters: the current reduced value (or the initial value for the first element), and the current stream element.
     * This function is applied sequentially left to right to the elements in this stream.
     * This is a terminal operation.
     *
     *
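     * <p>Usage sketch (illustrative only):</p>
     * <pre>{@code
     * StringBuilder joined = Stream.of("a", "b", "c")
     *         .foldLeft(new StringBuilder(), StringBuilder::append); // joined.toString() -> "abc"
     * }</pre>
     *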
* This method will always run sequentially, even in parallel stream. * * @param the type of the initial value and the return value * @param identity the initial value * @param accumulator the function for combining the current reduced value and the current stream element * @return the result of the reduction * @see #reduce(Object, BiFunction, BinaryOperator) */ @SequentialOnly @TerminalOp public abstract U foldLeft(U identity, BiFunction accumulator); /** * Performs a reduction on the elements of this stream, using the provided BinaryOperator, and returns an Optional. * The BinaryOperator should be an associative accumulation function, and it is applied from right to left to the elements in this stream. * This is a terminal operation. * *
* This method will always run sequentially, even in parallel stream. * * @param accumulator the function for combining two values * @return an Optional describing the result of the fold * @see #reduce(BinaryOperator) */ @SequentialOnly @TerminalOp public abstract Optional foldRight(BinaryOperator accumulator); /** * Performs a reduction on the elements of this stream, using the provided accumulator function, and returns the reduced value. * The accumulator function takes two parameters: the current reduced value (or the initial value for the first element), and the current stream element. * This function is applied sequentially right to left to the elements in this stream. * This is a terminal operation. * *
* This method will always run sequentially, even in parallel stream. * * @param the type of the initial value and the return value * @param identity the initial value * @param accumulator the function for combining the current reduced value and the current stream element * @return the result of the reduction * @see #reduce(Object, BiFunction, BinaryOperator) */ @SequentialOnly @TerminalOp public abstract U foldRight(U identity, BiFunction accumulator); /** * Performs a reduction on the elements of this stream, using the provided accumulator function, and returns the reduced value. * The accumulator function takes two parameters: the current reduced value and the current stream element. * * @param accumulator the function for combining the current reduced value and the current stream element * @return an Optional describing the result of the reduction. If the stream is empty, an empty Optional is returned. */ @ParallelSupported @TerminalOp public abstract Optional reduce(BinaryOperator accumulator); /** * Performs a reduction on the elements of this stream, using the provided accumulator function, and returns the reduced value. * The accumulator function takes two parameters: the current reduced value (or the initial value for the first element), and the current stream element. * * @param identity the initial value of the reduction operation * @param accumulator the function for combining the current reduced value and the current stream element * @return the result of the reduction */ @ParallelSupported @TerminalOp public T reduce(final T identity, final BinaryOperator accumulator) { return reduce(identity, accumulator, accumulator); } /** * Performs a reduction on the elements of this stream, using the provided accumulator function and combiner function, and returns the reduced value. * * @param the type of the initial value and the return value of the reduction operation * @param identity the initial value of the reduction operation * @param accumulator the function for combining the current reduced value and the current stream element * @param combiner the function for combining the results of the accumulator function * @return the result of the reduction */ @ParallelSupported @TerminalOp public abstract U reduce(U identity, BiFunction accumulator, BinaryOperator combiner); /** * Performs a reduction on the elements of this stream, using the provided accumulator function, until the specified condition is met. * This is a terminal operation. * * @param accumulator the function for combining the current reduced value and the current stream element * @param conditionToBreak the input parameter is the current reduced value returned by {@code accumulator}, not the current element from this Stream. * Returns {@code true} to break the loop if you don't want to continue the {@code action}. * Iteration on this stream will also be stopped when {@code true} is returned by {@code conditionToBreak}. * @return an Optional containing the result of the reduction if the stream is non-empty, otherwise an empty Optional */ @Beta @ParallelSupported @TerminalOp public abstract Optional reduceUntil(BinaryOperator accumulator, Predicate conditionToBreak); /** * Performs a reduction on the elements of this stream, using the provided accumulator function, until the specified condition is met. * This is a terminal operation. 
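     *
     * <p>Usage sketch (illustrative only; exactly when {@code conditionToBreak} is evaluated relative to the current element is left to the implementation, so only the early-exit behaviour is shown):</p>
     * <pre>{@code
     * Optional<Integer> sum = Stream.of(1, 2, 3, 4, 5, 6)
     *         .reduceUntil(Integer::sum, (total, next) -> total >= 10); // stops accumulating early instead of consuming the whole stream
     * }</pre>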
* * @param accumulator the function for combining the current reduced value and the current stream element * @param conditionToBreak the first input parameter is the current reduced value returned by {@code accumulator}, the second input parameter is the current element from this Stream. * Returns {@code true} to break the loop if you don't want to continue the {@code action}. * Iteration on this stream will also be stopped when {@code true} is returned by {@code conditionToBreak}. * @return an Optional containing the result of the reduction if the stream is non-empty, otherwise an empty Optional */ @Beta @ParallelSupported @TerminalOp public abstract Optional reduceUntil(BinaryOperator accumulator, BiPredicate conditionToBreak); /** * Performs a reduction on the elements of this stream, using the provided accumulator function, until the specified condition is met. * This is a terminal operation. * * @param identity the initial value * @param accumulator the function for combining the current reduced value and the current stream element * @param conditionToBreak the first input parameter is the current reduced value returned by {@code accumulator}, the second input parameter is the current element from this Stream. * Returns {@code true} to break the loop if you don't want to continue the {@code action}. * Iteration on this stream will also be stopped when {@code true} is returned by {@code conditionToBreak}. * @return the result of the reduction */ @Beta @ParallelSupported @TerminalOp public T reduceUntil(final T identity, final BinaryOperator accumulator, final Predicate conditionToBreak) { return reduceUntil(identity, accumulator, accumulator, conditionToBreak); } /** * Performs a reduction on the elements of this stream, using the provided accumulator function, until the specified condition is met. * This is a terminal operation. * * @param identity the initial value * @param accumulator the function for combining the current reduced value and the current stream element * @param conditionToBreak the first input parameter is the current reduced value returned by {@code accumulator}, the second input parameter is the current element from this Stream. * Returns {@code true} to break the loop if you don't want to continue the {@code action}. * Iteration on this stream will also be stopped when {@code true} is returned by {@code conditionToBreak}. * @return the result of the reduction */ @Beta @ParallelSupported @TerminalOp public T reduceUntil(final T identity, final BinaryOperator accumulator, final BiPredicate conditionToBreak) { return reduceUntil(identity, accumulator, accumulator, conditionToBreak); } /** * Performs a reduction on the elements of this stream, using the provided accumulator function and combiner, until the specified condition is met. * This is a terminal operation. * * @param the type of the identity and the return value * @param identity the initial value * @param accumulator the function for combining the current reduced value and the current stream element * @param combiner the function for combining two values, which is used in a parallel context * @param conditionToBreak the first input parameter is the current reduced value returned by {@code accumulator}, the second input parameter is the current element from this Stream. * Returns {@code true} to break the loop if you don't want to continue the {@code action}. * Iteration on this stream will also be stopped when {@code true} is returned by {@code conditionToBreak}. 
* @return the result of the reduction */ @Beta @ParallelSupported @TerminalOp public abstract U reduceUntil(U identity, BiFunction accumulator, BinaryOperator combiner, Predicate conditionToBreak); /** * Performs a reduction on the elements of this stream, using the provided accumulator function and combiner, until the specified condition is met. * This is a terminal operation. * * @param the type of the identity and the return value * @param identity the initial value * @param accumulator the function for combining the current reduced value and the current stream element * @param combiner the function for combining two values, which is used in a parallel context * @param conditionToBreak the first input parameter is the current reduced value returned by {@code accumulator}, the second input parameter is the current element from this Stream. * Returns {@code true} to break the loop if you don't want to continue the {@code action}. * Iteration on this stream will also be stopped when {@code true} is returned by {@code conditionToBreak}. * @return the result of the reduction */ @Beta @ParallelSupported @TerminalOp public abstract U reduceUntil(U identity, BiFunction accumulator, BinaryOperator combiner, BiPredicate conditionToBreak); /** * Performs a mutable reduction operation on the elements of this stream using a Collector. * * @param The type of the result * @param supplier a function that creates a new result container. For a parallel execution, this function may be called multiple times and must return a fresh value each time. * @param accumulator an associative, non-interfering, stateless function for incorporating an additional element into a result * @param combiner an associative, non-interfering, stateless function for combining two values, which must be compatible with the accumulator function. * It's unnecessary to specify {@code combiner} if {@code R} is a {@code Map/Collection/StringBuilder/Multiset/LongMultiset/Multimap/BooleanList/IntList/.../DoubleList}. * @return the result of the reduction * @see #collect(Supplier, BiConsumer) * @see #collect(Collector) * @see BiConsumers#ofAddAll() * @see BiConsumers#ofPutAll() */ @ParallelSupported @TerminalOp public abstract R collect(Supplier supplier, BiConsumer accumulator, BiConsumer combiner); /** * Performs a mutable reduction operation on the elements of this stream using a Collector. * *
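     * <p>Usage sketch (illustrative only; legal here because the result container is a {@code Collection}, as required by the note below):</p>
     * <pre>{@code
     * List<String> upper = Stream.of("a", "b")
     *         .collect(ArrayList::new, (list, e) -> list.add(e.toUpperCase())); // [A, B]
     * }</pre>
     *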
* Only call this method when the returned type {@code R} is one types: {@code Collection/Map/StringBuilder/Multiset/LongMultiset/Multimap/BooleanList/IntList/.../DoubleList}. * Otherwise, please call {@link #collect(Supplier, BiConsumer, BiConsumer)}. * * @param The type of the result. It must be {@code Collection/Map/StringBuilder/Multiset/LongMultiset/Multimap/BooleanList/IntList/.../DoubleList}. * @param supplier A function that creates a new result container. For a parallel execution, this function may be called multiple times and must return a fresh value each time. * @param accumulator An associative, non-interfering, stateless function for incorporating an additional element into a result. * @throws IllegalArgumentException if the returned type {@code R} is not one of the types: {@code Collection/Map/StringBuilder/Multiset/LongMultiset/Multimap/BooleanList/IntList/.../DoubleList}. * @return the result of the reduction * @see #collect(Supplier, BiConsumer, BiConsumer) * @see #collect(Collector) */ @ParallelSupported @TerminalOp public abstract R collect(Supplier supplier, BiConsumer accumulator) throws IllegalArgumentException; /** * Performs a mutable reduction operation on the elements of this stream using a Collector. * This is a terminal operation. * * @param The type of the result. The result can be any type that can be produced by the Collector. * @param collector The Collector encapsulating the reduction operation. It provides functions for creating a new result container, incorporating an element into a result, and combining two result containers. * @return the result of the reduction */ @ParallelSupported @TerminalOp public abstract R collect(Collector collector); /** * Collects the elements of this stream using the provided Collector, then applies the provided function to the result. * This is a terminal operation. * * @param The type of the intermediate result produced by the Collector. * @param The type of the final result after applying the function. * @param The type of exception that may be thrown during the collection or function application. * @param downstream The Collector to perform the reduction operation on the elements of this stream. * @param func The function to apply to the result of the collection. * @return The final result after applying the function to the collected elements. * @throws E If an exception occurs during function application. */ @ParallelSupported @TerminalOp public abstract RR collectThenApply(Collector downstream, Throwables.Function func) throws E; /** * Collects the elements of this stream using the provided Collector, then applies the provided consumer to the result. * This is a terminal operation. * * @param The type of the intermediate result produced by the Collector. * @param The type of exception that may be thrown during the collection or consumer application. * @param downstream The Collector to perform the reduction operation on the elements of this stream. * @param consumer The consumer to apply to the result of the collection. * @throws E If an exception occurs during the consumer application. */ @ParallelSupported @TerminalOp public abstract void collectThenAccept(Collector downstream, Throwables.Consumer consumer) throws E; /** * Applies the provided function to a list of elements collected from this stream. * This is a terminal operation. * * @param The type of the result produced by the function. * @param The type of exception that may be thrown during the function application. 
* @param func The function to apply to the list of elements. * @return The result produced by applying the function to the list of elements. * @throws E If an exception occurs during the function application. */ @SequentialOnly @TerminalOp public abstract R toListThenApply(Throwables.Function, ? extends R, E> func) throws E; /** * Applies the provided consumer to a list of elements collected from this stream. * This is a terminal operation. * * @param The type of exception that may be thrown during the consumer application. * @param consumer The consumer to apply to the list of elements. * @throws E If an exception occurs during the consumer application. */ @SequentialOnly @TerminalOp public abstract void toListThenAccept(Throwables.Consumer, E> consumer) throws E; /** * Applies the provided function to a set of elements collected from this stream. * This is a terminal operation. * * @param The type of the result produced by the function. * @param The type of exception that may be thrown during the function application. * @param func The function to apply to the set of elements. * @return The result produced by applying the function to the set of elements. * @throws E If an exception occurs during the function application. */ @SequentialOnly @TerminalOp public abstract R toSetThenApply(Throwables.Function, ? extends R, E> func) throws E; /** * Applies the provided consumer to a set of elements collected from this stream. * This is a terminal operation. * * @param The type of exception that may be thrown during the consumer application. * @param consumer The consumer to apply to the set of elements. * @throws E If an exception occurs during the consumer application. */ @SequentialOnly @TerminalOp public abstract void toSetThenAccept(Throwables.Consumer, E> consumer) throws E; /** * Applies the provided function to a collection of elements collected from this stream. * This is a terminal operation. * * @param The type of the result produced by the function. * @param The type of exception that may be thrown during the function application. * @param supplier the supplier to create the Collection instance. * @param func The function to apply to the collection of elements. * @return The result produced by applying the function to the collection of elements. * @throws E If an exception occurs during the function application. */ @SequentialOnly @TerminalOp public abstract , E extends Exception> R toCollectionThenApply(Supplier supplier, Throwables.Function func) throws E; /** * Applies the provided consumer to a collection of elements collected from this stream. * This is a terminal operation. * * @param The type of the collection to be used. * @param The type of exception that may be thrown during the consumer application. * @param supplier The supplier to provide the collection of elements. * @param consumer The consumer to apply to the collection of elements. * @throws E If an exception occurs during the consumer application. */ @SequentialOnly @TerminalOp public abstract , E extends Exception> void toCollectionThenAccept(Supplier supplier, Throwables.Consumer consumer) throws E; /** * Returns an Optional describing the minimum element of this stream according to the provided comparator. * This is a terminal operation. * * @param comparator The comparator to compare elements of this stream. * @return An Optional describing the minimum element of this stream, or an empty Optional if the stream is empty. 
*/ @ParallelSupported @TerminalOp public abstract Optional min(Comparator comparator); /** * Returns an Optional describing the minimum element of this stream according to the natural order of the keys produced by the provided keyMapper function. * Nulls are considered smaller than other values in the natural order. * This is a terminal operation. * * @param keyMapper A function to apply to each element to determine its key for comparison. * @return An Optional describing the minimum element of this stream, or an empty Optional if the stream is empty. */ @ParallelSupported @TerminalOp @SuppressWarnings("rawtypes") public Optional minBy(final Function keyMapper) { final Comparator comparator = Comparators.nullsLastBy(keyMapper); return min(comparator); } /** * Returns a List containing all the minimum elements of this stream according to the provided comparator. * This is a terminal operation. * * @param comparator The comparator to compare elements of this stream. * @return A List containing all the minimum elements of this stream. */ @ParallelSupported @TerminalOp public abstract List minAll(Comparator comparator); /** * Returns an Optional describing the maximum element of this stream according to the provided comparator. * This is a terminal operation. * * @param comparator The comparator to compare elements of this stream. * @return An Optional describing the maximum element of this stream, or an empty Optional if the stream is empty. */ @ParallelSupported @TerminalOp public abstract Optional max(Comparator comparator); /** * Returns an Optional describing the maximum element of this stream according to the natural order of the keys produced by the provided keyMapper function. * Nulls are considered smaller than other values in the operation looking for maximum value. * This is a terminal operation. * * @param keyMapper A function to apply to each element to determine its key for comparison. * @return An Optional describing the maximum element of this stream, or an empty Optional if the stream is empty. */ @ParallelSupported @TerminalOp @SuppressWarnings("rawtypes") public Optional maxBy(final Function keyMapper) { final Comparator comparator = Comparators.nullsFirstBy(keyMapper); return max(comparator); } /** * Returns a List containing all the maximum elements of this stream according to the provided comparator. * This is a terminal operation. * * @param comparator The comparator to compare elements of this stream. * @return A List containing all the maximum elements of this stream. */ @ParallelSupported @TerminalOp public abstract List maxAll(Comparator comparator); /** * Sums the integer values extracted from the elements in this stream using the provided function. * * @param mapper the function to extract integer values from the elements * @return the sum of the integer values * @see N#sumInt(Iterable, ToIntFunction) */ @ParallelSupported @TerminalOp public abstract long sumInt(ToIntFunction mapper); /** * Sums the long values extracted from the elements in this stream using the provided function. * * @param mapper the function to extract long values from the elements * @return the sum of the long values * @see N#sumLong(Iterable, ToLongFunction) */ @ParallelSupported @TerminalOp public abstract long sumLong(ToLongFunction mapper); /** * Sums the double values extracted from the elements in this stream using the provided function. 
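     * For example, {@code Stream.of(0.5, 1.5, 2.0).sumDouble(Double::doubleValue)} would be expected to return {@code 4.0}.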
* * @param mapper the function to extract double values from the elements * @return the sum of the double values * @see N#sumDouble(Iterable, ToDoubleFunction) */ @ParallelSupported @TerminalOp public abstract double sumDouble(ToDoubleFunction mapper); /** * Calculates the average of the integer values extracted from the elements in this stream using the provided function. * * @param mapper the function to extract integer values from the elements * @return the average of the integer values * @see N#averageInt(Iterable, ToIntFunction) */ @ParallelSupported @TerminalOp public abstract OptionalDouble averageInt(ToIntFunction mapper); /** * Calculates the average of the long values extracted from the elements in this stream using the provided function. * * @param mapper the function to extract long values from the elements * @return the average of the long values * @see N#averageLong(Iterable, ToLongFunction) */ @ParallelSupported @TerminalOp public abstract OptionalDouble averageLong(ToLongFunction mapper); /** * Calculates the average of the double values extracted from the elements in this stream using the provided function. * * @param mapper the function to extract double values from the elements * @return the average of the double values * @see N#averageDouble(Iterable, ToDoubleFunction) */ @ParallelSupported @TerminalOp public abstract OptionalDouble averageDouble(ToDoubleFunction mapper); /** * Returns the k-th largest element in the stream according to the provided comparator. * This is a terminal operation. * * @param k The rank of the element to find. k=1 would mean the largest element, k=2 the second largest, and so on. * @param comparator A comparator to determine the order of the elements. * @return An Optional containing the k-th largest element if it exists, otherwise an empty Optional. */ @ParallelSupported @TerminalOp public abstract Optional kthLargest(int k, Comparator comparator); /** * Calculates the percentiles of the elements in the stream according to the provided comparator. * All elements will be loaded into memory and sorted if not yet. * The returned map contains the percentile values as keys and the corresponding elements as values. * * @param comparator A comparator to determine the order of the elements. * @return An Optional containing a Map where the keys are the percentiles and the values are the corresponding elements. * If the stream is empty, an empty Optional is returned. * @see N#percentiles(int[]) */ @SequentialOnly @TerminalOp public abstract Optional> percentiles(Comparator comparator); /** * Checks if the stream contains duplicate elements. * This is a terminal operation. * * @return true if the stream contains duplicate elements, otherwise false. */ @SequentialOnly @TerminalOp public abstract boolean hasDuplicates(); // It won't work for findFirst/only/anyMatch... // @ParallelSupported // @TerminalOp // public abstract Pair countAnd(Throwables.Function, ? extends R, E> terminalAction) throws E; /** * Generates all possible combinations of the elements in this stream. * This is an intermediate operation and can only be processed sequentially. * The operation is stateful and may need to process the entire input before producing a result. * *
     * 
     * Stream.of(1, 2, 3).combinations().forEach(Fn.println());
     * // output
     * []
     * [1]
     * [2]
     * [3]
     * [1, 2]
     * [1, 3]
     * [2, 3]
     * [1, 2, 3]
     * 
     * 
* * @return A new stream where each element is a list representing a combination of elements from the original stream. * @see #combinations(int) * @see #combinations(int, boolean) */ @SequentialOnly @IntermediateOp public abstract Stream> combinations(); /** * Generates all possible combinations of the elements in this stream of the specified length. * This is an intermediate operation and can only be processed sequentially. * The operation is stateful and may need to process the entire input before producing a result. * *
     * 
     * Stream.of(1, 2, 3).combinations(2).forEach(Fn.println());
     * // output
     * [1, 2]
     * [1, 3]
     * [2, 3]
     * 
     * 
* * @param len The length of each combination. * @return A new stream where each element is a list representing a combination of elements from the original stream. * @see #combinations(int, boolean) * @see #permutations() * @see Iterables#permutations(Collection) */ @SequentialOnly @IntermediateOp public abstract Stream> combinations(int len); /** * Generates all possible combinations of the elements in this stream of the specified length, with or without repetition. * This is an intermediate operation and can only be processed sequentially. * The operation is stateful and may need to process the entire input before producing a result. * *
     * 
     * Stream.of(1, 2, 3).combinations(2, false).forEach(Fn.println());
     * // output
     * [1, 2]
     * [1, 3]
     * [2, 3]
     * ===================================================
     * Stream.of(1, 2, 3).combinations(2, true).forEach(Fn.println());
     * // output
     * [1, 1]
     * [1, 2]
     * [1, 3]
     * [2, 1]
     * [2, 2]
     * [2, 3]
     * [3, 1]
     * [3, 2]
     * [3, 3]
     * 
     * 
* * @param len The length of each combination. * @param repeat If {@code true}, elements can be repeated in the combinations; otherwise, elements are not repeated. * @return A new stream where each element is a list representing a combination of elements from the original stream. * @see Iterables#permutations(Collection) */ @SequentialOnly @IntermediateOp public abstract Stream> combinations(int len, boolean repeat); /** * Generates all permutations of the elements in the stream. * This operation is only applicable to sequential streams. The operation is stateful and may need to process the entire input * before producing a result. For example, one cannot permute elements until one has seen all of them. * As a result, for infinite input streams, this operation may never complete. * *
     * 
     * Stream.of(1, 2, 3).permutations().forEach(Fn.println());
     * // output
     * [1, 2, 3]
     * [1, 3, 2]
     * [3, 1, 2]
     * [3, 2, 1]
     * [2, 3, 1]
     * [2, 1, 3]
     * 
     * 
* * @return a new Stream consisting of all the permutations of the elements in the original Stream. * @see Iterables#permutations(Collection) */ @SequentialOnly @IntermediateOp public abstract Stream> permutations(); /** * Generates all ordered permutations of the elements in the stream. * * This operation is only applicable to sequential streams. The operation is stateful and may need to process the entire input * before producing a result. For example, one cannot permute elements until one has seen all of them. * As a result, for infinite input streams, this operation may never complete. * *
     * 
     * Stream.of(1, 2, 3).orderedPermutations().forEach(Fn.println());
     * // output
     * [1, 2, 3]
     * [1, 3, 2]
     * [2, 1, 3]
     * [2, 3, 1]
     * [3, 1, 2]
     * [3, 2, 1]
     * 
     * 
* * @return a new Stream consisting of all the ordered permutations of the elements in the original Stream. * @see Iterables#orderedPermutations(Collection) */ @SequentialOnly @IntermediateOp public abstract Stream> orderedPermutations(); /** * Generates all ordered permutations of the elements in the stream, according to the specified comparator. * * This operation is only applicable to sequential streams. The operation is stateful and may need to process the entire input * before producing a result. For example, one cannot permute elements until one has seen all of them. * As a result, for infinite input streams, this operation may never complete. * * The order of the permutations is determined by the provided comparator. If the comparator views two elements as equal, * there are no guarantees on the order of those elements in the resulting permutations. * *
     * 
     * Stream.of(1, 2, 3).orderedPermutations(Comparator.reverseOrder()).forEach(Fn.println());
     * // output
     * [3, 2, 1]
     * [3, 1, 2]
     * [2, 3, 1]
     * [2, 1, 3]
     * [1, 3, 2]
     * [1, 2, 3]
     * 
     * 
* * @param comparator the comparator that should be used to determine the order of the elements in the permutations * @return a new Stream consisting of all the ordered permutations of the elements in the original Stream. * @see Iterables#orderedPermutations(Collection, Comparator) */ @SequentialOnly @IntermediateOp public abstract Stream> orderedPermutations(Comparator comparator); /** * Generates the Cartesian product of the provided collections. * The Cartesian product is a mathematical operation that returns a set from multiple sets. * Each member of the Cartesian product is an ordered pair consisting of one member from each of the given sets. * * This operation is only applicable to sequential streams. The operation is stateful and may need to process the entire input * before producing a result. For example, one cannot generate a Cartesian product until one has seen all elements from all sets. * As a result, for infinite input streams, this operation may never complete. * *
     * 
     * Stream.of(1, 2, 3).cartesianProduct(Arrays.asList(4, 5), Arrays.asList(6, 7)).forEach(Fn.println());
     * // output
     * [1, 4, 6]
     * [1, 4, 7]
     * [1, 5, 6]
     * [1, 5, 7]
     * [2, 4, 6]
     * [2, 4, 7]
     * [2, 5, 6]
     * [2, 5, 7]
     * [3, 4, 6]
     * [3, 4, 7]
     * [3, 5, 6]
     * [3, 5, 7]
     * 
     * 
* * @param cs the collections to generate the Cartesian product from * @return a new Stream consisting of all the ordered pairs in the Cartesian product of the elements in the original collections. * @see #cartesianProduct(Collection) * @see Iterables#cartesianProduct(Collection...) */ @SequentialOnly @IntermediateOp @SafeVarargs public final Stream> cartesianProduct(final Collection... cs) { return cartesianProduct(Arrays.asList(cs)); } /** * Generates the Cartesian product of the provided collections. * The Cartesian product is a mathematical operation that returns a set from multiple sets. * Each member of the Cartesian product is an ordered pair consisting of one member from each of the given sets. * * This operation is only applicable to sequential streams. The operation is stateful and may need to process the entire input * before producing a result. For example, one cannot generate a Cartesian product until one has seen all elements from all sets. * As a result, for infinite input streams, this operation may never complete. * *
     * 
     * Stream.of(1, 2, 3).cartesianProduct(Arrays.asList(Arrays.asList(4, 5), Arrays.asList(6, 7))).forEach(Fn.println());
     * // output
     * [1, 4, 6]
     * [1, 4, 7]
     * [1, 5, 6]
     * [1, 5, 7]
     * [2, 4, 6]
     * [2, 4, 7]
     * [2, 5, 6]
     * [2, 5, 7]
     * [3, 4, 6]
     * [3, 4, 7]
     * [3, 5, 6]
     * [3, 5, 7]
     * 
     * 
* * @param cs the collections to generate the Cartesian product from * @return a new Stream consisting of all the ordered pairs in the Cartesian product of the elements in the original collections. * @see Iterables#cartesianProduct(Collection) */ @SequentialOnly @IntermediateOp public abstract Stream> cartesianProduct(Collection> cs); /** * Generates a rollup of the elements in the stream. * A rollup is a form of data summarization that aggregates data by ascending levels of hierarchy. * * This operation is only applicable to sequential streams. The operation is stateful and may need to process the entire input * before producing a result. For example, one cannot generate a rollup until one has seen all elements. * As a result, for infinite input streams, this operation may never complete. * *
     * 
     * Stream.of(1, 2, 3).rollup().forEach(System.out::println);
     * // output
     * []
     * [1]
     * [1, 2]
     * [1, 2, 3]
     * 
     * 
* * @return a new Stream consisting of Lists that represent the rollup of the elements in the original stream. * @see Iterables#rollup(Collection) */ @SequentialOnly @IntermediateOp @TerminalOpTriggered public abstract Stream> rollup(); /** * Returns a stream consisting of the elements of this stream that are also present in the specified collection. Occurrences are considered. * The comparison is based on the values obtained by applying the provided function to each element of the stream. * The order of the elements in the stream is preserved. * * This operation is stateful and may need to process the entire input before producing a result. * For example, one cannot generate an intersection until one has seen all elements from both the stream and the collection. * As a result, for infinite input streams, this operation may never complete. * *
     * 
     * Stream.of("apple", "apple", "apple", "banana", "banana", "cherry").intersection(String::length, Arrays.asList(5, 6, 5)).forEach(System.out::println);
     * // output
     * "apple"
     * "apple"
     * "banana"
     * 
     * 
* * @param mapper a function that transforms the stream elements into a form that can be compared with the elements in the collection * @param c the collection to intersect with * @return a new Stream consisting of the elements in the intersection of the transformed stream and the provided collection * @see N#intersection(int[], int[]) * @see N#intersection(Collection, Collection) * @see N#commonSet(Collection, Collection) * @see Collection#retainAll(Collection) */ @ParallelSupported @IntermediateOp public abstract Stream intersection(Function mapper, Collection c); /** * Returns a new stream that contains only the elements that are present in the original stream but not in the provided collection. Occurrences are considered. * The comparison is based on the values obtained by applying the provided function to each element of the stream. * * This operation is stateful and may need to process the entire input before producing a result. * For example, one cannot generate a difference until one has seen all elements from both the stream and the collection. * As a result, for infinite input streams, this operation may never complete. * *
     * 
     * Stream.of("apple", "apple", "apple", "banana", "banana", "cherry").difference(String::length, Arrays.asList(5, 6, 5)).forEach(System.out::println);
     * // output
     * "apple"
     * "banana"
     * "cherry"
     * 
     * 
* * @param mapper a function that transforms the stream elements into a form that can be compared with the elements in the collection * @param c the collection to find the difference with * @return a new Stream consisting of the elements in the difference of the transformed stream and the provided collection * @see IntList#difference(IntList) * @see N#difference(Collection, Collection) * @see N#symmetricDifference(Collection, Collection) * @see N#excludeAll(Collection, Collection) * @see N#excludeAllToSet(Collection, Collection) * @see N#removeAll(Collection, Iterable) * @see N#intersection(Collection, Collection) * @see N#commonSet(Collection, Collection) * @see Difference#of(Collection, Collection) */ @ParallelSupported @IntermediateOp public abstract Stream difference(Function mapper, Collection c); // /** // * // * @param defaultValues // * @return // * @see #appendIfEmpty(Object...) // */ // @SequentialOnly // @IntermediateOp // public final Stream defaultIfEmpty(final Collection defaultValues) { // return appendIfEmpty(defaultValues); // } // @SequentialOnly // public abstract Stream appendAlll(Collection> cs); /** * Prepends the specified elements to the beginning of this stream. * This is an intermediate operation and can only be processed sequentially. * * @param a the elements to prepend to the stream * @return a new stream with the specified elements prepended */ @SequentialOnly @IntermediateOp @SafeVarargs public final Stream prepend(final T... a) { return prepend(N.asList(a)); } /** * Prepends the specified collection of elements to the beginning of this stream. * This is an intermediate operation and can only be processed sequentially. * * @param c the collection of elements to prepend to the stream * @return a new stream with the specified collection of elements prepended */ @SequentialOnly @IntermediateOp public abstract Stream prepend(Collection c); /** * Appends the specified elements to the end of this stream. * This is an intermediate operation and can only be processed sequentially. * * @param a the elements to append to the stream * @return a new stream with the specified elements appended */ @SequentialOnly @IntermediateOp @SafeVarargs public final Stream append(final T... a) { return append(N.asList(a)); } /** * Appends the specified collection of elements to the end of this stream. * This is an intermediate operation and can only be processed sequentially. * * @param c the collection of elements to append to the stream * @return a new stream with the specified collection of elements appended */ @SequentialOnly @IntermediateOp public abstract Stream append(Collection c); // @SequentialOnly // public abstract Stream appendAlll(Collection> cs); // @SequentialOnly // public abstract Stream prependAlll(Collection> cs); /** * Appends the specified elements to the end of this stream if the stream is empty. * This is an intermediate operation and can only be processed sequentially. * * @param a the elements to append to the stream * @return a new stream with the specified elements appended if the stream is empty, otherwise the original stream */ @SequentialOnly @IntermediateOp @SafeVarargs public final Stream appendIfEmpty(final T... a) { return appendIfEmpty(N.asList(a)); } /** * Appends the specified collection of elements to the end of this stream if the stream is empty. * This is an intermediate operation and can only be processed sequentially. 
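     * For example (an illustrative sketch):
     * Stream.of(1, 2, 3).appendIfEmpty(N.asList(9)).toList();  // => [1, 2, 3] (stream is not empty, nothing is appended)
     * Stream.empty().appendIfEmpty(N.asList(9)).toList();      // => [9]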
* * @param c the collection of elements to append to the stream * @return a new stream with the specified collection of elements appended if the stream is empty, otherwise the original stream */ @SequentialOnly @IntermediateOp public abstract Stream appendIfEmpty(Collection c); // /** // * Returns a reusable stream which can be repeatedly used. // * // *
// * All elements will be loaded to memory. // * // * @param generator // * @return // */ // @SequentialOnly // public abstract Stream cached(IntFunction generator); // /** // * The Stream will be closed finally, no matter it's empty or not. // * // * @param func // * @return // */ // @Beta // public abstract Optional applyIfNotEmpty(Try.Function, ? extends R, E> func) throws E; // // /** // * The Stream will be closed finally, no matter it's empty or not. // * // * @param action // * // */ // @Beta // public abstract OrElse acceptIfNotEmpty(Try.Consumer, E> action) throws E; /** * Returns a stream that contains the specified default value if the original stream is empty. * This is an intermediate operation and can only be processed sequentially. * * @param defaultValue the default value to be used if the stream is empty * @return a new stream that contains the default value if the original stream is empty * @see #appendIfEmpty(Object...) */ @SequentialOnly @IntermediateOp public final Stream defaultIfEmpty(final T defaultValue) { return appendIfEmpty(defaultValue); } /** * Returns a new Stream with elements from a temporary queue which is filled by fetching elements from this Stream asynchronously with a new thread. * Default queue size is 64. *
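     * A possible usage sketch, where {@code slowParse} and {@code slowWrite} are placeholder methods for expensive per-element work:
     * stream.map(this::slowParse).buffered().forEach(this::slowWrite);  // parsing and writing can overlap in different threads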
* Mostly it's for {@code read-write with different threads} mode. * * @return a new Stream that is buffered * @see #buffered(int) */ @SequentialOnly @IntermediateOp public abstract Stream buffered(); /** * Returns a new Stream with elements from a temporary queue which is filled by fetching elements from this Stream asynchronously with a new thread. * This method is primarily used for read-write operations with different threads. * * @param bufferSize the size of the buffer to be used for the Stream * @return a new Stream that is buffered * @throws IllegalStateException if the stream has already been operated upon or closed * @throws IllegalArgumentException if bufferSize is non-positive */ @SequentialOnly @IntermediateOp public abstract Stream buffered(int bufferSize); /** * Returns a new Stream with elements from a temporary queue which is filled by fetching elements from this Stream asynchronously with a new thread. * This method is primarily used for read-write operations with different threads. * * @param queueToBuffer the queue to be used for buffering the Stream * @return a new Stream that is buffered * @throws IllegalArgumentException if queueToBuffer is null */ @SequentialOnly @IntermediateOp public abstract Stream buffered(final BlockingQueue queueToBuffer); // /** // * Returns a new Stream with elements from a temporary queue which is filled by fetching elements from this Stream asynchronously with a new thread. // * Default queue size is 64. // * // * @return // */ // @SequentialOnly // @IntermediateOp // public Stream buffered() { // return queued(); // } // // /** // * Returns a new Stream with elements from a temporary queue which is filled by fetching elements from this Stream asynchronously with a new thread. // * // * @param bufferSize // * @return // */ // @SequentialOnly // @IntermediateOp // public Stream buffered(final int bufferSize) { // return queued(bufferSize); // } // /** // * // * @param b // * @param nextSelector a function to determine which element should be selected as next element. // * The first parameter is selected if {@code MergeResult.TAKE_FIRST} is returned, otherwise the second parameter is selected. // * @return // * @deprecated replaced by {@code mergeWith(Stream, BiFunction)} // * @see #mergeWith(Stream, BiFunction) // */ // @Deprecated // @SequentialOnly // @IntermediateOp // public Stream merge(final Stream b, final BiFunction nextSelector) { // return mergeWith(b, nextSelector); // } /** * Merges the current Stream with a given Collection. The order of elements in the resulting Stream * is determined by the provided BiFunction, which takes two elements and returns a MergeResult. * If the BiFunction returns MergeResult.FIRST, the element from the current Stream is selected. * If it returns MergeResult.SECOND, the element from the given Collection is selected. * * @param b the Collection to be merged. It should be ordered. with the current Stream * @param nextSelector a BiFunction that determines the order of elements in the merged Stream * @return a new Stream that is the result of merging the current Stream with the given Collection */ @SequentialOnly @IntermediateOp public abstract Stream mergeWith(final Collection b, final BiFunction nextSelector); /** * Merges the current Stream with a given Stream. The order of elements in the resulting Stream * is determined by the provided BiFunction, which takes two elements and returns a MergeResult. * If the BiFunction returns MergeResult.FIRST, the element from the current Stream is selected. 
* If it returns MergeResult.SECOND, the element from the given Stream is selected. * * @param b the Stream to be merged. It should be ordered. with the current Stream * @param nextSelector a BiFunction that determines the order of elements in the merged Stream * @return a new Stream that is the result of merging the current Stream with the given Stream */ @SequentialOnly @IntermediateOp public abstract Stream mergeWith(final Stream b, final BiFunction nextSelector); /** * Zips this stream with the given collection using the provided zip function. * The zip function takes elements from this stream and the given collection until either the current stream or the given collection runs out of elements. * The resulting stream will have the length of the shorter of the current stream and the given collection. * * @param the type of elements in the given Collection * @param the type of elements in the resulting Stream * @param b the Collection to be combined with the current Stream * @param zipFunction a BiFunction that determines the combination of elements in the combined Stream * @return a new Stream that is the result of combining the current Stream with the given Collection * @see #zipWith(Collection, Object, Object, BiFunction) * @see N#zip(Iterable, Iterable, BiFunction) */ @ParallelSupported @IntermediateOp public abstract Stream zipWith(final Collection b, final BiFunction zipFunction); /** * Zips this stream with the given collection using the provided zip function. * The zip function combines elements from this stream and the given collection until both the current stream or the given collection runs out of elements. * The resulting stream will have the length of the longer of the current stream and the given collection. * If the current stream or the given collection runs out of elements before the other, the provided default values are used. * * @param the type of elements in the given Collection * @param the type of elements in the resulting Stream * @param b the Collection to be combined with the current Stream * @param valueForNoneA the default value to use for the current Stream when it runs out of elements * @param valueForNoneB the default value to use for the Collection when it runs out of elements * @param zipFunction a BiFunction that determines the combination of elements in the combined Stream * @return a new Stream that is the result of combining the current Stream with the given Collection * @see N#zip(Iterable, Iterable, Object, Object, BiFunction) */ @ParallelSupported @IntermediateOp public abstract Stream zipWith(Collection b, T valueForNoneA, final T2 valueForNoneB, BiFunction zipFunction); /** * Zips this stream with the given collections using the provided zip function. * The zip function takes elements from this stream and the given collections until either the current stream or one of the given collections runs out of elements. * The resulting stream will have the length of the shortest of the current stream and the given collections. 
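     * For example (an illustrative sketch):
     * Stream.of("a", "b", "c").zipWith(N.asList(1, 2), N.asList(true, false, true), (s, i, flag) -> s + i + flag).toList();
     * // => [a1true, b2false] (stops at the shortest input)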
* * @param the type of elements in the first given Collection * @param the type of elements in the second given Collection * @param the type of elements in the resulting Stream * @param b the first Collection to be combined with the current Stream * @param c the second Collection to be combined with the current Stream * @param zipFunction a TriFunction that determines the combination of elements in the combined Stream * @return a new Stream that is the result of combining the current Stream with the given Collections * @see #zipWith(Collection, Collection, Object, Object, Object, TriFunction) * @see N#zip(Iterable, Iterable, Iterable, TriFunction) */ @ParallelSupported @IntermediateOp public abstract Stream zipWith(Collection b, final Collection c, TriFunction zipFunction); /** * Zips this stream with the given collections using the provided zip function. * The zip function combines elements from this stream and the given collections until both the current stream or the given collections runs out of elements. * The resulting stream will have the length of the longest of the current stream and the given collections. * If the current stream or one of the given collections runs out of elements before the other, the provided default values are used. * * @param the type of elements in the first given Collection * @param the type of elements in the second given Collection * @param the type of elements in the resulting Stream * @param b the first Collection to be combined with the current Stream * @param c the second Collection to be combined with the current Stream * @param valueForNoneA the default value to use for the current Stream when it runs out of elements * @param valueForNoneB the default value to use for the first Collection when it runs out of elements * @param valueForNoneC the default value to use for the second Collection when it runs out of elements * @param zipFunction a TriFunction that determines the combination of elements in the combined Stream * @return a new Stream that is the result of combining the current Stream with the given Collections * @see N#zip(Iterable, Iterable, Iterable, Object, Object, Object, TriFunction) */ @ParallelSupported @IntermediateOp public abstract Stream zipWith(Collection b, Collection c, T valueForNoneA, T2 valueForNoneB, T3 valueForNoneC, TriFunction zipFunction); /** * Zips this stream with the given stream using the provided zip function. * The zip function takes elements from this stream and the given stream until either the current stream or the given stream runs out of elements. * The resulting stream will have the length of the shorter of the current stream and the given stream. * * @param the type of elements in the given Stream * @param the type of elements in the resulting Stream * @param b the Stream to be combined with the current Stream * @param zipFunction a BiFunction that determines the combination of elements in the combined Stream * @return a new Stream that is the result of combining the current Stream with the given Stream * @see #zipWith(Stream, Object, Object, BiFunction) * @see N#zip(Iterable, Iterable, BiFunction) */ @ParallelSupported @IntermediateOp public abstract Stream zipWith(final Stream b, final BiFunction zipFunction); /** * Zips this stream with the given stream using the provided zip function. * The zip function combines elements from this stream and the given stream until both the current stream or the given stream runs out of elements. 
* The resulting stream will have the length of the longer of the current stream and the given stream. * If the current stream or the given stream runs out of elements before the other, the provided default values are used. * * @param the type of elements in the given Stream * @param the type of elements in the resulting Stream * @param b the Stream to be combined with the current Stream * @param valueForNoneA the default value to use for the current Stream when it runs out of elements * @param valueForNoneB the default value to use for the given Stream when it runs out of elements * @param zipFunction a BiFunction that determines the combination of elements in the combined Stream * @return a new Stream that is the result of combining the current Stream with the given Stream * @see N#zip(Iterable, Iterable, Object, Object, BiFunction) */ @ParallelSupported @IntermediateOp public abstract Stream zipWith(final Stream b, final T valueForNoneA, final T2 valueForNoneB, final BiFunction zipFunction); /** * Zips this stream with the given streams using the provided zip function. * The zip function takes elements from this stream and the given streams until either the current stream or one of the given streams runs out of elements. * The resulting stream will have the length of the shortest of the current stream and the given streams. * * @param the type of elements in the second Stream * @param the type of elements in the third Stream * @param the type of elements in the resulting Stream * @param b the second Stream to be combined with the current Stream * @param c the third Stream to be combined with the current Stream * @param zipFunction a TriFunction that determines the combination of elements in the combined Stream * @return a new Stream that is the result of combining the current Stream with the given Streams * @see #zipWith(Stream, Stream, Object, Object, Object, TriFunction) * @see N#zip(Iterable, Iterable, Iterable, TriFunction) */ @ParallelSupported @IntermediateOp public abstract Stream zipWith(final Stream b, final Stream c, final TriFunction zipFunction); /** * Zips this stream with the given streams using the provided zip function. * The zip function combines elements from this stream and the given streams until both the current stream or the given streams runs out of elements. * The resulting stream will have the length of the longest of the current stream and the given streams. * If the current stream or one of the given streams runs out of elements before the other, the provided default values are used. 
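     * For example (an illustrative sketch):
     * Stream.of(1, 2, 3).zipWith(Stream.of(4, 5), Stream.of(6), 0, 0, 0, (a, b, c) -> a + b + c).toList();
     * // => [11, 7, 3] (missing elements are replaced by the default values)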
* * @param the type of elements in the second Stream * @param the type of elements in the third Stream * @param the type of elements in the resulting Stream * @param b the second Stream to be combined with the current Stream * @param c the third Stream to be combined with the current Stream * @param valueForNoneA the default value to use for the current Stream when it runs out of elements * @param valueForNoneB the default value to use for the second Stream when it runs out of elements * @param valueForNoneC the default value to use for the third Stream when it runs out of elements * @param zipFunction a TriFunction that determines the combination of elements in the combined Stream * @return a new Stream that is the result of combining the current Stream with the given Streams * @see N#zip(Iterable, Iterable, Iterable, Object, Object, Object, TriFunction) */ @ParallelSupported @IntermediateOp public abstract Stream zipWith(final Stream b, final Stream c, final T valueForNoneA, final T2 valueForNoneB, final T3 valueForNoneC, final TriFunction zipFunction); /** * Saves each element of this stream to the specified file. * {@code N.stringOf(Object)} is used to convert each element to a string. * {@code UncheckedIOException} may be thrown in the terminal operation if an I/O error occurs. * *
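     * A possible usage sketch ({@code "backup.txt"} is a placeholder path; the terminal {@code count()} triggers the lazy write):
     * Stream.of("a", "b", "c").saveEach(new File("backup.txt")).count();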
* This is an intermediate operation and will not close the stream. * * @param output the file to save each element to * @return this stream * @throws IllegalStateException if the stream is already closed. * @see #persist(File) * @see N#stringOf(Object) * @see #onEach(Consumer) */ @Beta @SequentialOnly @IntermediateOp public abstract Stream saveEach(File output); /** * Saves each element of this stream to the specified file using the provided function to convert elements to strings. * {@code UncheckedIOException} may be thrown in the terminal operation if an I/O error occurs. * *
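     * A possible usage sketch ({@code bean1}/{@code bean2} and {@code "beans.json"} are placeholders):
     * Stream.of(bean1, bean2).saveEach(N::toJson, new File("beans.json")).count();  // one JSON line per element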
* This is an intermediate operation and will not close the stream. * * @param toLine the function to convert each element to a string * @param output the file to save each element to * @return this stream * @throws IllegalStateException if the stream is already closed * @see #persist(Function, File) * @see N#stringOf(Object) * @see #onEach(Consumer) */ @Beta @SequentialOnly @IntermediateOp public abstract Stream saveEach(Function toLine, File output); /** * Saves each element of this stream to the specified output stream using the provided function to convert elements to strings. * {@code UncheckedIOException} may be thrown in the terminal operation if an I/O error occurs. * *
* This is an intermediate operation and will not close the stream. * * @param toLine the function to convert each element to a string * @param output the output stream to save each element to * @return this stream * @throws IllegalStateException if the stream is already closed * @see #persist(Function, OutputStream) * @see N#stringOf(Object) * @see #onEach(Consumer) */ @Beta @SequentialOnly @IntermediateOp public abstract Stream saveEach(Function toLine, OutputStream output); /** * Saves each element of this stream to the specified writer using the provided function to convert elements to strings. * {@code UncheckedIOException} may be thrown in the terminal operation if an I/O error occurs. * *
* This is an intermediate operation and will not close the stream. * * @param toLine the function to convert each element to a string * @param output the writer to save each element to * @return this stream * @throws IllegalStateException if the stream is already closed * @see #persist(Function, Writer) * @see N#stringOf(Object) * @see #onEach(Consumer) */ @Beta @SequentialOnly @IntermediateOp public abstract Stream saveEach(Function toLine, Writer output); /** * Saves each element of this stream to the specified file using the provided function to write each element. * {@code UncheckedIOException} may be thrown in the terminal operation if an I/O error occurs. * *
* This is an intermediate operation and will not close the stream. * * @param writeLine the function to write each element to the writer * @param output the file to save each element to * @return this stream * @throws IllegalStateException if the stream is already closed * @see #persist(Throwables.BiConsumer, File) * @see N#stringOf(Object) * @see #onEach(Consumer) */ @Beta @SequentialOnly @IntermediateOp public abstract Stream saveEach(final Throwables.BiConsumer writeLine, final File output); /** * Saves each element of this stream to the specified writer using the provided function to write each element. * {@code UncheckedIOException} may be thrown in the terminal operation if an I/O error occurs. * *
* This is an intermediate operation and will not close the stream. * * @param writeLine the function to write each element to the writer * @param output the writer to save each element to * @return this stream * @throws IllegalStateException if the stream is already closed * @see #persist(Throwables.BiConsumer, Writer) * @see N#stringOf(Object) * @see #onEach(Consumer) */ @Beta @SequentialOnly @IntermediateOp public abstract Stream saveEach(final Throwables.BiConsumer writeLine, final Writer output); /** * Saves each element of this stream to the specified prepared statement using the provided statement setter. * {@code UncheckedSQLException} may be thrown in the terminal operation if an SQL error happens. * *
* This is an intermediate operation and will not close the stream. * * @param stmt the prepared statement used to save each element * @param stmtSetter the function to set each element to the prepared statement * @return this stream * @throws IllegalStateException if the stream is already closed * @throws IllegalArgumentException if the specified prepared statement is null * @see #persist(PreparedStatement, int, long, Throwables.BiConsumer) * @see #onEach(Consumer) */ @Beta @SequentialOnly @IntermediateOp public abstract Stream saveEach(final PreparedStatement stmt, final Throwables.BiConsumer stmtSetter); /** * Saves each element of this stream to the specified prepared statement using the provided statement setter. * {@code UncheckedSQLException} may be thrown in the terminal operation if an SQL error happens. * *
* This is an intermediate operation and will not close the stream. * * @param stmt the prepared statement used to save each element * @param batchSize the number of elements to include in each batch. If the batch size is less than 2, batch update won't be used. * @param batchIntervalInMillis the interval in milliseconds between each batch * @param stmtSetter the function to set each element to the prepared statement * @return this stream * @throws IllegalStateException if the stream is already closed * @throws IllegalArgumentException if the specified prepared statement is {@code null}, or {@code batchSize} or {@code batchIntervalInMillis} is negative * @see #persist(PreparedStatement, int, long, Throwables.BiConsumer) * @see #onEach(Consumer) */ @Beta @SequentialOnly @IntermediateOp public abstract Stream saveEach(final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter); /** * Saves each element of this stream to the specified connection using the provided insert SQL and statement setter. * {@code UncheckedSQLException} may be thrown in the terminal operation if an SQL error happens. * *
* This is an intermediate operation and will not close the stream. * * @param conn the connection used to save each element * @param insertSQL the SQL insert script used to prepare the statement * @param stmtSetter the function to set each element to the prepared statement * @return this stream * @throws IllegalStateException if the stream is already closed * @throws IllegalArgumentException if the specified Connection or insert script is null * @see #persist(Connection, String, int, long, Throwables.BiConsumer) * @see #onEach(Consumer) */ @Beta @SequentialOnly @IntermediateOp public abstract Stream saveEach(final Connection conn, final String insertSQL, final Throwables.BiConsumer stmtSetter); /** * Saves each element of this stream to the specified connection using the provided insert SQL and statement setter. * {@code UncheckedSQLException} may be thrown in the terminal operation if an SQL error happens. * *
* This is an intermediate operation and will not close the stream. * * @param conn the connection used to save each element * @param insertSQL the SQL insert script used to prepare the statement * @param batchSize the number of elements to include in each batch. If the batch size is less than 2, batch update won't be used. * @param batchIntervalInMillis the interval in milliseconds between each batch * @param stmtSetter the function to set each element to the prepared statement * @return this stream * @throws IllegalStateException if the stream is already closed * @throws IllegalArgumentException if the specified Connection or insert script is {@code null}, or {@code batchSize} or {@code batchIntervalInMillis} is negative * @see #persist(Connection, String, int, long, Throwables.BiConsumer) * @see #onEach(Consumer) */ @Beta @SequentialOnly @IntermediateOp public abstract Stream saveEach(final Connection conn, final String insertSQL, final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter); /** * Saves each element of this stream to the specified data source using the provided SQL insert statement. * {@code UncheckedSQLException} may be thrown in the terminal operation if an SQL error happens. * *
* This is an intermediate operation and will not close the stream. * * @param ds the data source used to save each element * @param insertSQL the SQL insert script used to prepare the statement * @param stmtSetter the function to set each element to the prepared statement * @return this stream * @throws IllegalStateException if the stream is already closed * @throws IllegalArgumentException if the specified DataSource or insert script is null * @see #persist(javax.sql.DataSource, String, int, long, Throwables.BiConsumer) * @see #onEach(Consumer) */ @Beta @SequentialOnly @IntermediateOp public abstract Stream saveEach(final javax.sql.DataSource ds, final String insertSQL, final Throwables.BiConsumer stmtSetter); /** * Saves each element of this stream to the specified data source using the provided SQL insert statement. * {@code UncheckedSQLException} may be thrown in the terminal operation if an SQL error happens. * *
* This is an intermediate operation and will not close the stream. * * @param ds the data source used to save each element * @param insertSQL the SQL insert script used to prepare the statement * @param batchSize the number of elements to include in each batch. If the batch size is less than 2, batch update won't be used. * @param batchIntervalInMillis the interval in milliseconds between each batch * @param stmtSetter the function to set each element to the prepared statement * @return this stream * @throws IllegalStateException if the stream is already closed * @throws IllegalArgumentException if the specified DataSource or insert script is {@code null}, or {@code batchSize} or {@code batchIntervalInMillis} is negative * @see #persist( javax.sql.DataSource, String, int, long, Throwables.BiConsumer) * @see #onEach(Consumer) */ @Beta @SequentialOnly @IntermediateOp public abstract Stream saveEach(final javax.sql.DataSource ds, final String insertSQL, final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter); /** * Persists the stream to the specified file. * {@code N.stringOf(Object)} is used to convert each element to a string. * *
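     * A possible usage sketch ({@code "out.txt"} is a placeholder path):
     * long lines = Stream.of("a", "b", "c").persist(new File("out.txt"));  // writes one line per element and returns 3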
* This is a terminal operation and will close the stream. * * @param output the file to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs * @see N#stringOf(Object) */ @SequentialOnly @TerminalOp public abstract long persist(File output) throws IOException; /** * Persists the stream to the specified file with a header and tail. * {@code N.stringOf(Object)} is used to convert each element to a string. * *
* This is a terminal operation and will close the stream. * * @param header the header line to be written at the beginning of the file * @param tail the tail line to be written at the end of the file * @param output the file to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs * @see N#stringOf(Object) */ @SequentialOnly @TerminalOp public abstract long persist(String header, String tail, File output) throws IOException; /** * Persists the stream to the specified file using the provided function to convert elements to strings. * *
* This is a terminal operation and will close the stream. * * @param toLine the function to convert each element to a string * @param output the file to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs * @see N#stringOf(Object) */ @SequentialOnly @TerminalOp public abstract long persist(Function toLine, File output) throws IOException; /** * Persists the stream to the specified file with a header and tail using the provided function to convert elements to strings. * *
* This is a terminal operation and will close the stream. * * @param header the header line to be written at the beginning of the file * @param tail the tail line to be written at the end of the file * @param toLine the function to convert each element to a string * @param output the file to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs * @see N#stringOf(Object) */ @SequentialOnly @TerminalOp public abstract long persist(String header, String tail, Function toLine, File output) throws IOException; /** * Persists the stream to the specified output stream using the provided function to convert elements to strings. * *
* This is a terminal operation and will close the stream. * * @param toLine the function to convert each element to a string * @param output the output stream to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs * @see N#stringOf(Object) */ @SequentialOnly @TerminalOp public abstract long persist(Function toLine, OutputStream output) throws IOException; /** * Persists the stream to the specified output stream with a header and tail using the provided function to convert elements to strings. * *
* This is a terminal operation and will close the stream. * * @param header the header line to be written at the beginning of the file * @param tail the tail line to be written at the end of the file * @param toLine the function to convert each element to a string * @param output the output stream to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs * @see N#stringOf(Object) */ @SequentialOnly @TerminalOp public abstract long persist(String header, String tail, Function toLine, OutputStream output) throws IOException; /** * Persists the stream to the specified writer using the provided function to convert elements to strings. * *
* This is a terminal operation and will close the stream. * * @param toLine the function to convert each element to a string * @param output the writer to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs * @see N#stringOf(Object) */ @SequentialOnly @TerminalOp public abstract long persist(Function toLine, Writer output) throws IOException; /** * Persists the stream to the specified writer with a header and tail using the provided function to convert elements to strings. * *
* This is a terminal operation and will close the stream. * * @param header the header line to be written at the beginning of the file * @param tail the tail line to be written at the end of the file * @param toLine the function to convert each element to a string * @param output the writer to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs * @see N#stringOf(Object) */ @SequentialOnly @TerminalOp public abstract long persist(String header, String tail, Function toLine, Writer output) throws IOException; /** * Persists the stream to the specified file using the provided function to convert elements to strings. * *
* This is a terminal operation and will close the stream. * * @param writeLine the function to write each element to the writer * @param output the file to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs */ @SequentialOnly @TerminalOp public abstract long persist(final Throwables.BiConsumer writeLine, final File output) throws IOException; /** * Persists the stream to the specified file with a header and tail using the provided function to write each element. * *
* This is a terminal operation and will close the stream. * * @param header the header line to be written at the beginning of the file * @param tail the tail line to be written at the end of the file * @param writeLine the function to write each element to the writer * @param output the file to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs */ @SequentialOnly @TerminalOp public abstract long persist(final String header, final String tail, final Throwables.BiConsumer writeLine, final File output) throws IOException; /** * Persists the stream to the specified writer using the provided function to write each element. * *
* This is a terminal operation and will close the stream. * * @param writeLine the function to write each element to the writer * @param output the writer to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs */ @SequentialOnly @TerminalOp public abstract long persist(final Throwables.BiConsumer writeLine, final Writer output) throws IOException; /** * Persists the stream to the specified writer with a header and tail using the provided function to write each element. * *
* This is a terminal operation and will close the stream. * * @param header the header line to be written at the beginning of the file * @param tail the tail line to be written at the end of the file * @param writeLine the function to write each element to the writer * @param output the writer to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs */ @SequentialOnly @TerminalOp public abstract long persist(final String header, final String tail, final Throwables.BiConsumer writeLine, final Writer output) throws IOException; /** * Persists the stream to the specified prepared statement using the provided statement setter. * *
* This is a terminal operation and will close the stream. * * @param stmt the prepared statement used to persist the stream * @param batchSize the number of elements to include in each batch. If the batch size is less than 2, batch update won't be used. * @param batchIntervalInMillis the interval in milliseconds between each batch * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IllegalArgumentException if the specified prepared statement is {@code null}, or {@code batchSize} or {@code batchIntervalInMillis} is negative * @throws SQLException if an SQL error occurs */ @SequentialOnly @TerminalOp public abstract long persist(final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws SQLException; /** * Persists the stream to the specified connection using the provided insert SQL and statement setter. * *
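     * A possible usage sketch; the table and column names, {@code conn}, {@code accounts} and the {@code (stmt, account)} parameter order of the setter are assumptions to be checked against the {@code stmtSetter} signature:
     * long saved = accounts.persist(conn, "INSERT INTO account (name) VALUES (?)", 200, 0,
     *         (stmt, account) -> stmt.setString(1, account.getName()));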
* This is a terminal operation and will close the stream. * * @param conn the connection used to persist the stream * @param insertSQL the SQL insert script used to prepare the statement * @param batchSize the number of elements to include in each batch. If the batch size is less than 2, batch update won't be used. * @param batchIntervalInMillis the interval in milliseconds between each batch * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IllegalArgumentException if the specified Connection or insert script is {@code null}, or {@code batchSize} or {@code batchIntervalInMillis} is negative * @throws SQLException if an SQL error occurs */ @SequentialOnly @TerminalOp public abstract long persist(final Connection conn, final String insertSQL, final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws SQLException; /** * Persists the stream to the specified data source using the provided SQL insert statement. * *
* This is a terminal operation and will close the stream. * * @param ds the data source used to persist the stream * @param insertSQL the SQL insert script used to prepare the statement * @param batchSize the number of elements to include in each batch. If the batch size is less than 2, batch update won't be used. * @param batchIntervalInMillis the interval in milliseconds between each batch * @param stmtSetter the consumer used to set the parameters of the prepared statement for each element * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IllegalArgumentException if the specified DataSource or insert script is {@code null}, or {@code batchSize} or {@code batchIntervalInMillis} is negative * @throws SQLException if an SQL error occurs */ @SequentialOnly @TerminalOp public abstract long persist(final javax.sql.DataSource ds, final String insertSQL, final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws SQLException; /** * Persists the stream with CSV format to the specified file. * * The first row will be used as field names if its type is array or list, * or the column names will be obtained from the first row if its type is bean or map. * Each line in the output file/Writer is an array of JSON String without root bracket. * *
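* <p>A minimal usage sketch (the bean elements and the file name are illustrative assumptions):
* <pre>{@code
* // column names are derived from the first element (a bean or map in this sketch)
* long rows = Stream.of(account1, account2).persistToCSV(new File("accounts.csv"));
* }</pre>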
* This is a terminal operation and will close the stream. * * @param output the file to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs */ @Beta @SequentialOnly @TerminalOp public abstract long persistToCSV(File output) throws IOException; /** * Persists the stream with CSV format to the specified file with the specified headers. * Each line in the output file/Writer is an array of JSON String without root bracket. * *
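* <p>A minimal usage sketch (the header names, bean elements and file name are illustrative assumptions):
* <pre>{@code
* long rows = Stream.of(account1, account2)
*         .persistToCSV(Arrays.asList("id", "name"), new File("accounts.csv"));
* }</pre>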
* This is a terminal operation and will close the stream. * * @param csvHeaders the headers to be used for the CSV file * @param output the file to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs */ @SequentialOnly @TerminalOp public abstract long persistToCSV(Collection csvHeaders, File output) throws IOException; /** * Persists the stream with CSV format to the specified output stream. * * The first row will be used as field names if its type is array or list, * or the column names will be obtained from the first row if its type is bean or map. * Each line in the output file/Writer is an array of JSON String without root bracket. * *
* This is a terminal operation and will close the stream. * * @param output the output stream to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs */ @Beta @SequentialOnly @TerminalOp public abstract long persistToCSV(OutputStream output) throws IOException; /** * Persists the stream with CSV format to the specified output stream with the specified headers. * Each line in the output file/Writer is an array of JSON String without root bracket. * *
* This is a terminal operation and will close the stream. * * @param csvHeaders the headers to be used for the CSV file * @param output the output stream to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs */ @SequentialOnly @TerminalOp public abstract long persistToCSV(Collection csvHeaders, OutputStream output) throws IOException; /** * Persists the stream with CSV format to the specified writer. * * The first row will be used as field names if its type is array or list, * or the column names will be obtained from the first row if its type is bean or map. * Each line in the output file/Writer is an array of JSON String without root bracket. * *
* This is a terminal operation and will close the stream. * * @param output the writer to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs */ @Beta @SequentialOnly @TerminalOp public abstract long persistToCSV(Writer output) throws IOException; /** * Persists the stream with CSV format to the specified writer with the specified headers. * Each line in the output file/Writer is an array of JSON String without root bracket. * *
* This is a terminal operation and will close the stream. * * @param csvHeaders the headers to be used for the CSV file * @param output the writer to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs */ @SequentialOnly @TerminalOp public abstract long persistToCSV(Collection csvHeaders, Writer output) throws IOException; /** * Persists the stream with JSON format to the specified file. * *
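* <p>A minimal usage sketch (the bean elements and the file name are illustrative assumptions):
* <pre>{@code
* long count = Stream.of(account1, account2).persistToJSON(new File("accounts.json"));
* }</pre>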
* This is a terminal operation and will close the stream. * * @param output the file to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs */ @SequentialOnly @TerminalOp public abstract long persistToJSON(File output) throws IOException; /** * Persists the stream with JSON format to the specified output stream. * *
* This is a terminal operation and will close the stream. * * @param output the output stream to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs */ @SequentialOnly @TerminalOp public abstract long persistToJSON(OutputStream output) throws IOException; /** * Persists the stream with JSON format to the specified writer. * *
* This is a terminal operation and will close the stream. * * @param output the writer to persist the stream to * @return the number of elements persisted * @throws IllegalStateException if the stream is already closed * @throws IOException if an I/O error occurs */ @SequentialOnly @TerminalOp public abstract long persistToJSON(Writer output) throws IOException; // /** // * // * @param // * @return // */ // @Beta // @SequentialOnly // @IntermediateOp // public Seq checked() { // return Seq. from(this); // } // // /** // * // * @param // * @param exceptionType // * @return // */ // @Beta // @SequentialOnly // @IntermediateOp // public Seq checked(final Class exceptionType) { // return Seq. from(this, exceptionType); // } /** * Converts this Stream to a JDK Stream. * This is an intermediate operation. * * @return a java.util.stream.Stream containing the elements of this Stream * @see #transformB(Function) * @see #transformB(Function, boolean) */ @SequentialOnly @IntermediateOp public abstract java.util.stream.Stream toJdkStream(); /** * Transforms the current Stream into another Stream by applying the provided function. * The function takes a Stream as input and returns a new Stream. * The returned Stream is then wrapped into a Stream of this class. * * @param The type of elements in the returned stream. * @param transfer The function to be applied on the current stream to produce a new stream. * @return A new Stream transformed by the provided function. * @throws IllegalArgumentException if the provided function is {@code null}. * @see #toJdkStream() */ @Beta @SequentialOnly @IntermediateOp public Stream transformB(final Function, ? extends java.util.stream.Stream> transfer) throws IllegalArgumentException { // Major reason for commenting out below lines is to keep consistent with method transform. // assertNotClosed(); // // checkArgNotNull(transfer, "transfer"); // // final Supplier> delayInitializer = () -> Stream.from(transfer.apply(this.toJdkStream())); // // return Stream.defer(delayInitializer); return transformB(transfer, false); } /** * Transforms the current Stream into another Stream by applying the provided function. * The function takes a Stream as input and returns a new Stream. * The returned Stream is then wrapped into a Stream of this class. * The transformation can be deferred, which means it will be performed when the stream is consumed. * * @param The type of elements in the returned stream. * @param transfer The function to be applied on the current stream to produce a new stream. * @param deferred If {@code true}, the transformation is deferred until the stream is consumed. * @return A new Stream transformed by the provided function. * @throws IllegalStateException if the stream has already been operated upon or closed. * @throws IllegalArgumentException if the provided function is {@code null}. * @see #toJdkStream() */ @Beta @SequentialOnly @IntermediateOp public Stream transformB(final Function, ? extends java.util.stream.Stream> transfer, final boolean deferred) throws IllegalStateException, IllegalArgumentException { assertNotClosed(); checkArgNotNull(transfer, cs.transfer); if (deferred) { final Supplier> delayInitializer = () -> Stream.from(transfer.apply(this.toJdkStream())); return Stream.defer(delayInitializer); } else { return Stream.from(transfer.apply(this.toJdkStream())); } } abstract ObjIteratorEx iteratorEx(); /** * Temporarily switches the stream to a parallel stream for the operation {@code op} and then switches back to a sequential stream. 
* This method is useful for performing a specific operation in parallel while keeping the rest of the stream operations sequential. * *
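* <p>A minimal usage sketch (the source, the {@code process} helper and the thread/chunk numbers are illustrative assumptions; each chunk is assumed to arrive as a collection of elements):
* <pre>{@code
* List<Result> results = Stream.of(tasks)
*         .sps(4, 64, chunk -> Stream.of(chunk).map(task -> process(task)))   // only this part runs in parallel
*         .toList();
* }</pre>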
* @implNote it's equivalent to: {@code split(chunkSize).parallel(maxThreadNum).flatMap(op::apply).sequential()} * * @param The type of elements in the returned stream. * @param maxThreadNum The maximum number of threads to be used for the parallel operation. * @param chunkSize The chunk size this stream will be split into for the parallel operation. * @param op The operation will take a chunk of elements and return a new Stream executed repeatedly by multiple threads. * @return A new Stream transformed by the provided function. * @throws IllegalArgumentException if the specified maxThreadNum or chunkSize is equal to or less than 0 */ @Beta @IntermediateOp public Stream sps(final int maxThreadNum, final int chunkSize, final Function, ? extends Stream> op) throws IllegalArgumentException { assertNotClosed(); checkArgPositive(maxThreadNum, cs.maxThreadNum); checkArgPositive(chunkSize, cs.chunkSize); final int executorNumForVirtualThread = 0; if (isParallel() && maxThreadNum == maxThreadNum() && executorNumForVirtualThread == executorNumForVirtualThread()) { //noinspection resource return split(chunkSize).flatMap(op::apply).sequential(); } else { final AsyncExecutor asyncExecutor = asyncExecutor(); final int checkedMaxThreadNum = checkMaxThreadNum(maxThreadNum, executorNumForVirtualThread, asyncExecutor); final int checkedVirtualTaskNum = checkExecutorNumForVirtualThread(checkedMaxThreadNum, executorNumForVirtualThread); //noinspection resource return split(chunkSize).parallel(checkedMaxThreadNum, checkedVirtualTaskNum, splitor(), asyncExecutor, cancelUncompletedThreads()) .flatMap(op::apply) .sequential(); } } /** * Temporarily switches the stream to a parallel stream for the operation {@code filter} and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping the rest of the stream operations sequential. * *
* @implNote it's equivalent to: {@code stream().parallel().filter(predicate).sequential()} * * @param predicate The predicate to be used for the filter operation on the stream. * @return A new Stream that has been filtered in parallel using the provided predicate. * @see #sps(Function) */ @Beta @IntermediateOp public Stream spsFilter(final Predicate predicate) { return sps(s -> s.filter(predicate)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code map} and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping the rest of the stream operations sequential. * *
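* <p>A minimal usage sketch (the source and the {@code expensiveHash} helper are illustrative assumptions):
* <pre>{@code
* List<String> hashes = Stream.of(files)
*         .spsMap(file -> expensiveHash(file))   // only the mapping runs in parallel
*         .sorted()
*         .toList();
* }</pre>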
* @implNote it's equivalent to: {@code stream().parallel().map(mapper).sequential()} * * @param The type of the elements in the returned stream. * @param mapper The mapping function to apply to each element. * @return A new Stream consisting of the results of applying the given function to the elements of this stream. * @see #sps(Function) */ @Beta @IntermediateOp public Stream spsMap(final Function mapper) { return sps(s -> s.map(mapper)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code flatMap} and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping the rest of the stream operations sequential. * *
* @implNote it's equivalent to: {@code stream().parallel().flatMap(mapper).sequential()} * * @param The type of the elements in the returned stream. * @param mapper The mapping function to apply to each element, which produces a stream of new values. * @return A new Stream consisting of the results of applying the given function to the elements of this stream. * @see #sps(Function) */ @Beta @IntermediateOp public Stream spsFlatMap(final Function> mapper) { return sps(s -> s.flatMap(mapper)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code flatmap} and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping the rest of the stream operations sequential. * *
* @implNote it's equivalent to: {@code stream().parallel().flatmap(mapper).sequential()} * * @param The type of the elements in the returned stream. * @param mapper The mapping function to apply to each element, which produces a collection of new values. * @return A new Stream consisting of the results of applying the given function to the elements of this stream. * @see #sps(Function) */ @Beta @IntermediateOp public Stream spsFlatmap(final Function> mapper) { return sps(s -> s.flatmap(mapper)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code onEach} and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping the rest of the stream operations sequential. * *
* @implNote it's equivalent to: {@code stream().parallel().onEach(action).sequential()} * * @param action The action to be performed on each element of the stream. * @return A new Stream with the specified action applied in parallel. * @see #sps(Function) */ @Beta @IntermediateOp public Stream spsOnEach(final Consumer action) { return sps(s -> s.onEach(action)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code filter} and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping the rest of the stream operations sequential. * *
* @implNote it's equivalent to: {@code stream().parallel(maxThreadNum).filter(predicate).sequential()} * * @param maxThreadNum The maximum number of threads to be used for parallel execution. * @param predicate The predicate to apply to each element to determine if it should be included. * @return A new Stream with the specified filter applied in parallel. * @see #sps(int, Function) */ @Beta @IntermediateOp public Stream spsFilter(final int maxThreadNum, final Predicate predicate) { return sps(maxThreadNum, s -> s.filter(predicate)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code map} and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping the rest of the stream operations sequential. * *
* @implNote it's equivalent to: {@code stream().parallel(maxThreadNum).map(mapper).sequential()} * * @param The type of the elements in the returned stream. * @param maxThreadNum The maximum number of threads to be used for parallel execution. * @param mapper The mapping function to apply to each element. * @return A new Stream consisting of the results of applying the given function to the elements of this stream. * @see #sps(int, Function) */ @Beta @IntermediateOp public Stream spsMap(final int maxThreadNum, final Function mapper) { return sps(maxThreadNum, s -> s.map(mapper)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code flatMap} and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping the rest of the stream operations sequential. * *
* @implNote it's equivalent to: {@code stream().parallel(maxThreadNum).flatMap(mapper).sequential()} * * @param The type of the elements in the returned stream. * @param maxThreadNum The maximum number of threads to be used for parallel execution. * @param mapper The mapping function to apply to each element, which produces a stream of new values. * @return A new Stream consisting of the results of applying the given function to the elements of this stream. * @see #sps(int, Function) */ @Beta @IntermediateOp public Stream spsFlatMap(final int maxThreadNum, final Function> mapper) { return sps(maxThreadNum, s -> s.flatMap(mapper)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code flatmap} and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping the rest of the stream operations sequential. * *
* @implNote it's equivalent to: {@code stream().parallel(maxThreadNum).flatmap(mapper).sequential()} * * @param The type of the elements in the returned stream. * @param maxThreadNum The maximum number of threads to be used for parallel execution. * @param mapper The mapping function to apply to each element, which produces a collection of new values. * @return A new Stream consisting of the results of applying the given function to the elements of this stream. * @see #sps(int, Function) */ @Beta @IntermediateOp public Stream spsFlatmap(final int maxThreadNum, final Function> mapper) { return sps(maxThreadNum, s -> s.flatmap(mapper)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code onEach} and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping the rest of the stream operations sequential. * *
* @implNote it's equivalent to: {@code stream().parallel(maxThreadNum).onEach(action).sequential()} * * @param maxThreadNum The maximum number of threads to be used for parallel execution. * @param action The action to be performed on each element of the stream. * @return A new Stream with the specified action applied in parallel. * @see #sps(int, Function) */ @Beta @IntermediateOp public Stream spsOnEach(final int maxThreadNum, final Consumer action) { return sps(maxThreadNum, s -> s.onEach(action)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code filter} and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping the rest of the stream operations sequential. * *
* @implNote it's equivalent to: {@code split(chunkSize).parallel(maxThreadNum).flatMap(s -> Stream.of(s).filter(predicate)).sequential()} * * @param maxThreadNum The maximum number of threads to be used for parallel execution. * @param chunkSize The size of chunks to split the stream into for parallel processing. * @param predicate The predicate to be used for the filter operation on the stream. * @return A new Stream that has been filtered in parallel using the provided predicate. * @see #sps(int, int, Function) */ @Beta @IntermediateOp public Stream spsFilter(final int maxThreadNum, final int chunkSize, final Predicate predicate) { //noinspection resource return sps(maxThreadNum, chunkSize, s -> Stream.of(s).filter(predicate)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code map} * and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping * the rest of the stream operations sequential. * *
* @implNote It's equivalent to: {@code split(chunkSize).parallel(maxThreadNum).flatMap(s -> Stream.of(s).map(mapper)).sequential()} * * @param The type of the elements in the resulting stream. * @param maxThreadNum The maximum number of threads to be used for parallel execution. * @param chunkSize The size of chunks to split the stream into for parallel processing. * @param mapper The function to be applied to each element of the stream. * @return A new Stream that has been mapped in parallel using the provided mapper function. * @see #sps(int, int, Function) */ @Beta @IntermediateOp public Stream spsMap(final int maxThreadNum, final int chunkSize, final Function mapper) { //noinspection resource return sps(maxThreadNum, chunkSize, s -> Stream.of(s).map(mapper)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code flatMap} * and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping * the rest of the stream operations sequential. * *
* @implNote It's equivalent to: {@code split(chunkSize).parallel(maxThreadNum).flatMap(s -> Stream.of(s).flatMap(mapper)).sequential()} * * @param The type of the elements in the resulting stream. * @param maxThreadNum The maximum number of threads to be used for parallel execution. * @param chunkSize The size of chunks to split the stream into for parallel processing. * @param mapper The function to be applied to each element of the stream. * @return A new Stream that has been flat-mapped in parallel using the provided mapper function. * @see #sps(int, int, Function) */ @Beta @IntermediateOp public Stream spsFlatMap(final int maxThreadNum, final int chunkSize, final Function> mapper) { //noinspection resource return sps(maxThreadNum, chunkSize, s -> Stream.of(s).flatMap(mapper)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code flatMap} * and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping * the rest of the stream operations sequential. * *
* @implNote It's equivalent to: {@code split(chunkSize).parallel(maxThreadNum).flatMap(s -> Stream.of(s).flatmap(mapper)).sequential()} * * @param The type of the elements in the resulting stream. * @param maxThreadNum The maximum number of threads to be used for parallel execution. * @param chunkSize The size of chunks to split the stream into for parallel processing. * @param mapper The function to be applied to each element of the stream. * @return A new Stream that has been flat-mapped in parallel using the provided mapper function. * @see #sps(int, int, Function) */ @Beta @IntermediateOp public Stream spsFlatmap(final int maxThreadNum, final int chunkSize, final Function> mapper) { //noinspection resource return sps(maxThreadNum, chunkSize, s -> Stream.of(s).flatmap(mapper)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code onEach} * and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping * the rest of the stream operations sequential. * *
* @implNote It's equivalent to: {@code split(chunkSize).parallel(maxThreadNum).flatMap(s -> Stream.of(s).onEach(action)).sequential()} * * @param maxThreadNum The maximum number of threads to be used for parallel execution. * @param chunkSize The size of chunks to split the stream into for parallel processing. * @param action The action to be performed on each element of the stream. * @return A new Stream with the specified action applied in parallel. * @see #sps(int, int, Function) */ @Beta @IntermediateOp public Stream spsOnEach(final int maxThreadNum, final int chunkSize, final Consumer action) { //noinspection resource return sps(maxThreadNum, chunkSize, s -> Stream.of(s).onEach(action)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code filter} * and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping * the rest of the stream operations sequential. * *
* @implNote It's equivalent to: {@code stream().parallel().filterE(predicate).sequential()} * * @param predicate The predicate to be used for the filter operation on the stream. * @return A new Stream that has been filtered in parallel using the provided predicate. * @see #sps(Function) */ @Beta @IntermediateOp public Stream spsFilterE(final Throwables.Predicate predicate) { return sps(s -> s.filterE(predicate)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code map} * and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping * the rest of the stream operations sequential. * *
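* <p>A minimal usage sketch (the source of {@code File} elements is an illustrative assumption; the checked {@code IOException} thrown by the lambda does not need to be wrapped by hand):
* <pre>{@code
* List<byte[]> payloads = Stream.of(files)
*         .spsMapE(file -> java.nio.file.Files.readAllBytes(file.toPath()))
*         .toList();
* }</pre>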
* @implNote It's equivalent to: {@code stream().parallel().mapE(mapper).sequential()} * * @param The type of the elements in the returned stream. * @param mapper The mapping function to apply to each element. * @return A new Stream consisting of the results of applying the given function to the elements of this stream. * @see #sps(Function) */ @Beta @IntermediateOp public Stream spsMapE(final Throwables.Function mapper) { return sps(s -> s.mapE(mapper)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code flatMap} * and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping * the rest of the stream operations sequential. * *
* @implNote It's equivalent to: {@code stream().parallel().flatMapE(mapper).sequential()} * * @param The type of the elements in the returned stream. * @param mapper The mapping function to apply to each element, which produces a stream of new values. * @return A new Stream consisting of the results of applying the given function to the elements of this stream. * @see #sps(Function) */ @Beta @IntermediateOp public Stream spsFlatMapE(final Throwables.Function, ? extends Exception> mapper) { return sps(s -> s.flatMapE(mapper)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code flatMap} * and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping * the rest of the stream operations sequential. * *
* @implNote It's equivalent to: {@code stream().parallel().flatmapE(mapper).sequential()} * * @param The type of the elements in the returned stream. * @param mapper The mapping function to apply to each element, which produces a collection of new values. * @return A new Stream consisting of the results of applying the given function to the elements of this stream. * @see #sps(Function) */ @Beta @IntermediateOp public Stream spsFlatmapE(final Throwables.Function, ? extends Exception> mapper) { return sps(s -> s.flatmapE(mapper)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code onEach} * and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping * the rest of the stream operations sequential. * *
* @implNote It's equivalent to: {@code stream().parallel().onEachE(action).sequential()} * * @param action The action to be performed on each element of the stream. * @return A new Stream with the specified action applied in parallel. * @see #sps(Function) */ @Beta @IntermediateOp public Stream spsOnEachE(final Throwables.Consumer action) { return sps(s -> s.onEachE(action)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code filter} * and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping * the rest of the stream operations sequential. * *
* @implNote It's equivalent to: {@code stream().parallel(maxThreadNum).filterE(predicate).sequential()} * * @param maxThreadNum The maximum number of threads to be used for parallel execution. * @param predicate The predicate to be used for the filter operation on the stream. * @return A new Stream that has been filtered in parallel using the provided predicate. * @see #sps(int, Function) */ @Beta @IntermediateOp public Stream spsFilterE(final int maxThreadNum, final Throwables.Predicate predicate) { return sps(maxThreadNum, s -> s.filterE(predicate)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code map} * and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping * the rest of the stream operations sequential. * *
* @implNote It's equivalent to: {@code stream().parallel(maxThreadNum).mapE(mapper).sequential()} * * @param The type of the elements in the returned stream. * @param maxThreadNum The maximum number of threads to be used for parallel execution. * @param mapper The mapping function to apply to each element. * @return A new Stream consisting of the results of applying the given function to the elements of this stream. * @see #sps(int, Function) */ @Beta @IntermediateOp public Stream spsMapE(final int maxThreadNum, final Throwables.Function mapper) { return sps(maxThreadNum, s -> s.mapE(mapper)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code flatMap} * and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping * the rest of the stream operations sequential. * *
* @implNote It's equivalent to: {@code stream().parallel(maxThreadNum).flatMapE(mapper).sequential()} * * @param The type of the elements in the returned stream. * @param maxThreadNum The maximum number of threads to be used for parallel execution. * @param mapper The mapping function to apply to each element, which produces a stream of new values. * @return A new Stream consisting of the results of applying the given function to the elements of this stream. * @see #sps(int, Function) */ @Beta @IntermediateOp public Stream spsFlatMapE(final int maxThreadNum, final Throwables.Function, ? extends Exception> mapper) { return sps(maxThreadNum, s -> s.flatMapE(mapper)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code flatMap} * and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping * the rest of the stream operations sequential. * *
* @implNote It's equivalent to: {@code stream().parallel(maxThreadNum).flatmapE(mapper).sequential()} * * @param The type of the elements in the returned stream. * @param maxThreadNum The maximum number of threads to be used for parallel execution. * @param mapper The mapping function to apply to each element, which produces a collection of new values. * @return A new Stream consisting of the results of applying the given function to the elements of this stream. * @see #sps(int, Function) */ @Beta @IntermediateOp public Stream spsFlatmapE(final int maxThreadNum, final Throwables.Function, ? extends Exception> mapper) { return sps(maxThreadNum, s -> s.flatmapE(mapper)); } /** * Temporarily switches the stream to a parallel stream for the operation {@code onEach} * and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping * the rest of the stream operations sequential. * *
* @implNote It's equivalent to: {@code stream().parallel(maxThreadNum).onEachE(action).sequential()} * * @param maxThreadNum The maximum number of threads to be used for parallel execution. * @param action The action to be performed on each element of the stream. * @return A new Stream with the specified action applied in parallel. * @see #sps(int, Function) */ @Beta @IntermediateOp public Stream spsOnEachE(final int maxThreadNum, final Throwables.Consumer action) { return sps(maxThreadNum, s -> s.onEachE(action)); } /** * Temporarily switches the stream to a JDK parallel stream for the operation {@code op} * and then switches it back to a sequential stream. * This method is useful for performing a specific operation in parallel while keeping * the rest of the stream operations sequential. * *
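* <p>A minimal usage sketch (the mapping itself is arbitrary; it only illustrates that the supplied function receives a {@code java.util.stream.Stream}):
* <pre>{@code
* List<Integer> lengths = Stream.of("a", "bb", "ccc")
*         .sjps(js -> js.map(String::length))   // runs on the JDK parallel stream
*         .toList();
* }</pre>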
* @implNote It's equivalent to: {@code stream().toJdkStream().parallel().op(map/filter/...).sequential()} * * @param The type of the elements in the resulting stream. * @param op The function to be applied to the JDK parallel stream. * @return A new Stream with the specified operation applied in parallel. */ @Beta @IntermediateOp public Stream sjps(final Function, ? extends java.util.stream.Stream> op) { if (isParallel()) { //noinspection resource return newStream(((java.util.stream.Stream) op.apply(this.toJdkStream())).iterator(), false, null).sequential(); //NOSONAR } else { return newStream(((java.util.stream.Stream) op.apply(this.toJdkStream().parallel())).iterator(), false, null); } } // No performance improvement. // /** // * Temporarily switch the stream to Jdk parallel stream for operation {@code ops} and then switch back to stream stream. // *
// * {@code stream.(switchToJdkStream).parallel().ops(map/filter/...).(switchBack).stream()} // * // * @param // * @param op // * @return // */ // @Beta // @IntermediateOp // public IntStream sjpis(Function, ? extends java.util.stream.IntStream> op) { // if (this.isParallel()) { // return IntStream.of(op.apply(this.toJdkStream())).sequential(); // } else { // return IntStream.of(op.apply(this.toJdkStream().parallel())).sequential(); // } // } // // /** // * Temporarily switch the stream to Jdk parallel stream for operation {@code ops} and then switch back to stream stream. // *
// * {@code stream.(switchToJdkStream).parallel().ops(map/filter/...).(switchBack).stream()} // * // * @param // * @param op // * @return // */ // @Beta // @IntermediateOp // public LongStream sjpls(Function, ? extends java.util.stream.LongStream> op) { // if (this.isParallel()) { // return LongStream.of(op.apply(this.toJdkStream())).sequential(); // } else { // return LongStream.of(op.apply(this.toJdkStream().parallel())).sequential(); // } // } // // /** // * Temporarily switch the stream to Jdk parallel stream for operation {@code ops} and then switch back to stream stream. // *
// * {@code stream.(switchToJdkStream).parallel().ops(map/filter/...).(switchBack).stream()} // * // * @param // * @param op // * @return // */ // @Beta // @IntermediateOp // public DoubleStream sjpds(Function, ? extends java.util.stream.DoubleStream> op) { // if (this.isParallel()) { // return DoubleStream.of(op.apply(this.toJdkStream())).sequential(); // } else { // return DoubleStream.of(op.apply(this.toJdkStream().parallel())).sequential(); // } // } /** * Filters the elements of this stream using the provided predicate. * * @param predicate the predicate to apply to each element to determine if it should be included * @return a new stream that contains only the elements that match the predicate * @throws Exception if the predicate throws an exception * @see Fn#pp(Throwables.Predicate) */ @Beta @ParallelSupported @IntermediateOp public Stream filterE(final Throwables.Predicate predicate) { return filter(Fn.pp(predicate)); } /** * Transforms the elements in the stream by applying a function to each element. * This is an intermediate operation. * * @param The type of the result elements. * @param mapper The function to be applied to each element in the stream. * @return A new Stream consisting of the results of applying the given function to the elements of this stream. * @throws Exception if the mapper function throws an exception * @see Fn#ff(Throwables.Function) */ @Beta @ParallelSupported @IntermediateOp public Stream mapE(final Throwables.Function mapper) { return map(Fn.ff(mapper)); } /** * Transforms the elements in the stream by applying a function that returns a stream to each element, * and then flattens the resulting streams into a single stream. * This is an intermediate operation. * * @param The type of the result elements. * @param mapper The function to be applied to each element in the stream, which returns a stream. * @return A new Stream consisting of the results of applying the given function to the elements of this stream, * and then flattening the resulting streams. * @throws Exception if the mapper function throws an exception * @see Fn#ff(Throwables.Function) */ @Beta @ParallelSupported @IntermediateOp public Stream flatMapE(final Throwables.Function, ? extends Exception> mapper) { return flatMap(Fn.ff(mapper)); } /** * Transforms the elements in the stream by applying a function that returns a collection to each element, * and then flattens the resulting collections into a single stream. * This is an intermediate operation. * * @param The type of the result elements. * @param mapper The function to be applied to each element in the stream, which returns a collection. * @return A new Stream consisting of the results of applying the given function to the elements of this stream, * and then flattening the resulting collections. * @throws Exception if the mapper function throws an exception * @see Fn#ff(Throwables.Function) */ @Beta @ParallelSupported @IntermediateOp public Stream flatmapE(final Throwables.Function, ? extends Exception> mapper) { return flatmap(Fn.ff(mapper)); } /** * Performs the provided action for each element of the Stream. * The action is a Consumer that can throw an exception. 
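* <p>A minimal usage sketch (the source of {@code Path} elements is an illustrative assumption; the lambda may throw a checked {@code IOException} directly):
* <pre>{@code
* Stream.of(paths)
*         .onEachE(path -> java.nio.file.Files.createDirectories(path))
*         .forEach(System.out::println);
* }</pre>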
* * @param action The action to be performed for each element, which can throw an exception * @return The new Stream * @throws Exception if the action throws an exception */ @Beta @ParallelSupported @IntermediateOp public Stream onEachE(final Throwables.Consumer action) { return onEach(Fn.cc(action)); } // /** // * // * // * @param action // * @return // * @see Fn#cc(Throwables.Consumer) // */ // @Beta // @ParallelSupported // @IntermediateOp // public Stream saveOnEachE(final Throwables.Consumer action) { // return onEach(Fn.cc(action)); // } // /** // * To reduce the memory footprint, Only one instance of DisposableEntry is created, // * and the same entry instance is returned and set with different keys/values during iteration of the returned stream. // * The elements only can be retrieved one by one, can't be modified or saved. // * The returned Stream doesn't support the operations which require two or more elements at the same time: (e.g. sort/distinct/pairMap/slidingMap/sliding/split/toList/toSet/...). // * , and can't be parallel stream. // * Operations: filter/map/toMap/groupBy/groupTo/... are supported. // * // * @param // * @param // * @param keyMapper // * @param valueMapper // * @return // * @throws IllegalStateException // * @see DisposableEntry // * @see NoCachingNoUpdating // * @deprecated // */ // @Beta // @SequentialOnly // @Deprecated // public Stream> mapToDisposableEntry(final Function keyMapper, // final Function valueMapper) throws IllegalStateException { // assertNotClosed(); // checkState(!isParallel(), "mapToDisposableEntry can't be applied to parallel stream"); // // final Function> mapper = new Function<>() { // private final EntryStream.ReusableEntry entry = new EntryStream.ReusableEntry<>(); // // @Override // public DisposableEntry apply(final T t) { // entry.set(keyMapper.apply(t), valueMapper.apply(t)); // // return entry; // } // }; // // return map(mapper); // } // #######################################9X9####################################### // #######################################9X9####################################### /** * The time complexity is O(n * m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param b * @return * @see
What is the difference between "INNER JOIN" and "OUTER JOIN" */ @SequentialOnly @IntermediateOp public abstract Stream> crossJoin(Collection b); /** * The time complexity is O(n * m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param func * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @SequentialOnly @IntermediateOp public abstract Stream crossJoin(Collection b, BiFunction func); /** * The time complexity is O(n * m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b will be loaded to memory. If {@code b} is too big to load to memory, please use {@code b.cronJoin(this, ...)}. It will be closed along with this {@code Stream}. * @param func * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @SequentialOnly @IntermediateOp public abstract Stream crossJoin(Stream b, BiFunction func); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param leftKeyExtractor * @param rightKeyExtractor * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream> innerJoin(Collection b, Function leftKeyExtractor, Function rightKeyExtractor); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param * @param b * @param leftKeyExtractor * @param rightKeyExtractor * @param func * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream innerJoin(Collection b, Function leftKeyExtractor, Function rightKeyExtractor, final BiFunction func); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param b * @param keyMapper * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream> innerJoin(Collection b, Function keyMapper); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param keyMapper * @param func * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream innerJoin(Collection b, Function keyMapper, final BiFunction func); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param * @param b will be loaded to memory. If {@code b} is too big to load to memory, please use {@code b.innerJoin(this, ...)}. It will be closed along with this {@code Stream}. * @param leftKeyExtractor * @param rightKeyExtractor * @param func * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream innerJoin(Stream b, Function leftKeyExtractor, Function rightKeyExtractor, final BiFunction func); /** * * The time complexity is O(n * m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param b * @param predicate * @return * @deprecated The time complexity is O(n * m). 
You should try {@code innerJoin(Collection, Function, Function)} first. * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @Deprecated @ParallelSupported @IntermediateOp public abstract Stream> innerJoin(Collection b, BiPredicate predicate); /** * The time complexity is O(n * m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param predicate * @param func * @return * @deprecated The time complexity is O(n * m). You should try {@code innerJoin(Collection, Function, Function, BiFunction)} first. * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @Deprecated @ParallelSupported @IntermediateOp public abstract Stream innerJoin(Collection b, BiPredicate predicate, final BiFunction func); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param leftKeyExtractor * @param rightKeyExtractor * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream> fullJoin(Collection b, Function leftKeyExtractor, Function rightKeyExtractor); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param * @param b * @param leftKeyExtractor * @param rightKeyExtractor * @param func * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream fullJoin(Collection b, Function leftKeyExtractor, Function rightKeyExtractor, final BiFunction func); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param b * @param keyMapper * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream> fullJoin(Collection b, Function keyMapper); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param keyMapper * @param func * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream fullJoin(Collection b, Function keyMapper, final BiFunction func); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param * @param b will be loaded to memory. If {@code b} is too big to load to memory, please use {@code b.fullJoin(this, ...)}. It will be closed along with this {@code Stream}. * @param leftKeyExtractor * @param rightKeyExtractor * @param func * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream fullJoin(Stream b, Function leftKeyExtractor, Function rightKeyExtractor, final BiFunction func); /** * The time complexity is O(n * m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param b * @param predicate * @return * @deprecated The time complexity is O(n * m). You should try {@code fullJoin(Collection, Function, Function)} first. 
* @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @Deprecated @ParallelSupported @IntermediateOp public abstract Stream> fullJoin(Collection b, BiPredicate predicate); /** * The time complexity is O(n * m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param predicate * @param func * @return * @deprecated The time complexity is O(n * m). You should try {@code fullJoin(Collection, Function, Function, BiFunction)} first. * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @Deprecated @ParallelSupported @IntermediateOp public abstract Stream fullJoin(Collection b, BiPredicate predicate, final BiFunction func); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param leftKeyExtractor * @param rightKeyExtractor * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream> leftJoin(Collection b, Function leftKeyExtractor, Function rightKeyExtractor); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param * @param b * @param leftKeyExtractor * @param rightKeyExtractor * @param func * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream leftJoin(Collection b, Function leftKeyExtractor, Function rightKeyExtractor, final BiFunction func); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param b * @param keyMapper * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream> leftJoin(Collection b, Function keyMapper); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param keyMapper * @param func * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream leftJoin(Collection b, Function keyMapper, final BiFunction func); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param * @param b will be loaded to memory. If {@code b} is too big to load to memory, please use {@code b.leftJoin(this, ...)}. It will be closed along with this {@code Stream}. * @param leftKeyExtractor * @param rightKeyExtractor * @param func * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream leftJoin(Stream b, Function leftKeyExtractor, Function rightKeyExtractor, final BiFunction func); /** * The time complexity is O(n * m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param b * @param predicate * @return * @deprecated The time complexity is O(n * m). You should try {@code leftJoin(Collection, Function, Function)} first. 
* @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @Deprecated @ParallelSupported @IntermediateOp public abstract Stream> leftJoin(Collection b, BiPredicate predicate); /** * The time complexity is O(n * m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param predicate * @param func * @return * @deprecated The time complexity is O(n * m). You should try {@code leftJoin(Collection, Function, Function, BiFunction)} first. * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @Deprecated @ParallelSupported @IntermediateOp public abstract Stream leftJoin(Collection b, BiPredicate predicate, final BiFunction func); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param leftKeyExtractor * @param rightKeyExtractor * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream> rightJoin(Collection b, Function leftKeyExtractor, Function rightKeyExtractor); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param * @param b * @param leftKeyExtractor * @param rightKeyExtractor * @param func * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream rightJoin(Collection b, Function leftKeyExtractor, Function rightKeyExtractor, final BiFunction func); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param b * @param keyMapper * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream> rightJoin(Collection b, Function keyMapper); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param keyMapper * @param func * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream rightJoin(Collection b, Function keyMapper, final BiFunction func); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param * @param b will be loaded to memory. If {@code b} is too big to load to memory, please use {@code b.rightJoin(this, ...)}. It will be closed along with this {@code Stream}. * @param leftKeyExtractor * @param rightKeyExtractor * @param func * @return * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @ParallelSupported @IntermediateOp public abstract Stream rightJoin(Stream b, Function leftKeyExtractor, Function rightKeyExtractor, final BiFunction func); /** * The time complexity is O(n * m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param b * @param predicate * @return * @deprecated The time complexity is O(n * m). You should try {@code rightJoin(Collection, Function, Function)} first. 
* @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @Deprecated @ParallelSupported @IntermediateOp public abstract Stream> rightJoin(Collection b, BiPredicate predicate); /** * The time complexity is O(n * m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param predicate * @param func * @return * @deprecated The time complexity is O(n * m). You should try {@code rightJoin(Collection, Function, Function, BiFunction)} first. * @see What is the difference between "INNER JOIN" and "OUTER JOIN" */ @Deprecated @ParallelSupported @IntermediateOp public abstract Stream rightJoin(Collection b, BiPredicate predicate, final BiFunction func); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param leftKeyExtractor * @param rightKeyExtractor * @return */ @ParallelSupported @IntermediateOp public abstract Stream>> groupJoin(Collection b, Function leftKeyExtractor, Function rightKeyExtractor); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param * @param b * @param leftKeyExtractor * @param rightKeyExtractor * @param func * @return */ @ParallelSupported @IntermediateOp public abstract Stream groupJoin(Collection b, Function leftKeyExtractor, Function rightKeyExtractor, final BiFunction, ? extends R> func); /** * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param b * @param keyMapper * @return */ @ParallelSupported @IntermediateOp public abstract Stream>> groupJoin(Collection b, Function keyMapper); /** * * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param keyMapper * @param func * @return */ @ParallelSupported @IntermediateOp public abstract Stream groupJoin(Collection b, Function keyMapper, final BiFunction, ? extends R> func); /** * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param * @param b will be loaded to memory. If {@code b} is too big to load to memory, please use {@code b.groupJoin(this, ...)}. It will be closed along with this {@code Stream}. * @param leftKeyExtractor * @param rightKeyExtractor * @param func * @return */ @ParallelSupported @IntermediateOp public abstract Stream groupJoin(Stream b, Function leftKeyExtractor, Function rightKeyExtractor, final BiFunction, ? extends R> func); /** * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param leftKeyExtractor * @param rightKeyExtractor * @param mergeFunction * @return */ @ParallelSupported @IntermediateOp public abstract Stream> groupJoin(Collection b, Function leftKeyExtractor, Function rightKeyExtractor, BinaryOperator mergeFunction); /** * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. 
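* <p>A minimal sketch of the general {@code groupJoin} idea, shown with the simpler {@code groupJoin(Collection, Function, Function)} overload (the {@code orders}/{@code items} names and their accessors are illustrative assumptions):
* <pre>{@code
* // each order is paired with the list of items whose orderId matches its id
* Stream.of(orders)
*         .groupJoin(items, order -> order.getId(), item -> item.getOrderId())
*         .forEach(System.out::println);
* }</pre>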
* * @param * @param * @param * @param b * @param leftKeyExtractor * @param rightKeyExtractor * @param mergeFunction * @param func * @return */ @ParallelSupported @IntermediateOp public abstract Stream groupJoin(Collection b, Function leftKeyExtractor, Function rightKeyExtractor, BinaryOperator mergeFunction, final BiFunction func); /** * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param * @param b * @param leftKeyExtractor * @param rightKeyExtractor * @param mergeFunction * @param func * @return */ @ParallelSupported @IntermediateOp public abstract Stream groupJoin(Stream b, Function leftKeyExtractor, Function rightKeyExtractor, BinaryOperator mergeFunction, final BiFunction func); /** * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param * @param b * @param leftKeyExtractor * @param rightKeyExtractor * @param downstream * @return */ @ParallelSupported @IntermediateOp public abstract Stream> groupJoin(Collection b, Function leftKeyExtractor, Function rightKeyExtractor, Collector downstream); /** * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param * @param * @param b * @param leftKeyExtractor * @param rightKeyExtractor * @param downstream * @param func * @return */ @ParallelSupported @IntermediateOp public abstract Stream groupJoin(Collection b, Function leftKeyExtractor, Function rightKeyExtractor, Collector downstream, final BiFunction func); /** * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param b * @param keyMapper * @param downstream * @return */ @ParallelSupported @IntermediateOp public abstract Stream> groupJoin(Collection b, Function keyMapper, Collector downstream); /** * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param * @param b * @param keyMapper * @param downstream * @param func * @return */ @ParallelSupported @IntermediateOp public abstract Stream groupJoin(Collection b, Function keyMapper, Collector downstream, final BiFunction func); /** * The time complexity is O(n + m) : n is the size of this {@code Stream} and m is the size of specified collection {@code b}. * * @param * @param * @param * @param * @param b will be loaded to memory. If {@code b} is too big to load to memory, please use {@code b.groupJoin(this, ...)}. It will be closed along with this {@code Stream}. * @param leftKeyExtractor * @param rightKeyExtractor * @param downstream * @param func * @return */ @ParallelSupported @IntermediateOp public abstract Stream groupJoin(Stream b, Function leftKeyExtractor, Function rightKeyExtractor, Collector downstream, final BiFunction func); /** * * @param * @param b should be ordered * @param predicate check if the element from {@code b} can be joined with current element from {@code this Stream}. * @return */ @Beta @SequentialOnly @IntermediateOp public abstract Stream>> joinByRange(final Iterator b, final BiPredicate predicate); /** * * @param * @param * @param b should be ordered * @param predicate check if the element from {@code b} can be joined with current element from {@code this Stream}. 
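     *            <p>A rough illustrative sketch (not part of the original documentation; {@code Session} and
     *            {@code Event} are hypothetical types, both sources ordered by time) using the plain
     *            {@code joinByRange(Iterator, BiPredicate)} overload:</p>
     *            <pre>{@code
     *            Stream<Pair<Session, List<Event>>> joined =
     *                    Stream.of(sessions)
     *                          .joinByRange(events.iterator(),
     *                                  (session, event) -> event.getTime() < session.getEndTime());
     *            }</pre>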
* @param collector * @return */ @Beta @SequentialOnly @IntermediateOp public abstract Stream> joinByRange(final Iterator b, final BiPredicate predicate, final Collector collector); /** * * @param * @param * @param * @param b should be ordered * @param predicate check if the element from {@code b} can be joined with current element from {@code this Stream}. * @param collector * @param func * @return */ @Beta @SequentialOnly @IntermediateOp public abstract Stream joinByRange(final Iterator b, final BiPredicate predicate, final Collector collector, BiFunction func); /** * * @param * @param * @param * @param b should be ordered. It will be closed along with this {@code Stream} * @param predicate check if the element from {@code b} can be joined with current element from {@code this Stream}. * @param collector * @param func * @param mapperForUnJoinedElements map the remaining elements in provided {@code Iterator} to a Stream which will be appended to the end of the returned Stream. * @return */ @Beta @SequentialOnly @IntermediateOp public abstract Stream joinByRange(final Iterator b, final BiPredicate predicate, final Collector collector, BiFunction func, Function, Stream> mapperForUnJoinedElements); /** * * @param * @param b should be ordered. It will be closed along with this {@code Stream} * @param predicate check if the element from {@code b} can be joined with current element from {@code this Stream}. * @return */ @Beta @SequentialOnly @IntermediateOp public abstract Stream>> joinByRange(final Stream b, final BiPredicate predicate); /** * * @param * @param * @param b should be ordered. It will be closed along with this {@code Stream} * @param predicate check if the element from {@code b} can be joined with current element from {@code this Stream}. * @param collector * @return */ @Beta @SequentialOnly @IntermediateOp public abstract Stream> joinByRange(final Stream b, final BiPredicate predicate, final Collector collector); /** * * @param * @param * @param * @param b should be ordered. It will be closed along with this {@code Stream} * @param predicate check if the element from {@code b} can be joined with current element from {@code this Stream}. * @param collector * @param func * @return */ @Beta @SequentialOnly @IntermediateOp public abstract Stream joinByRange(final Stream b, final BiPredicate predicate, final Collector collector, final BiFunction func); /** * * @param * @param * @param * @param b should be ordered. It will be closed along with this {@code Stream} * @param predicate check if the element from {@code b} can be joined with current element from {@code this Stream}. * @param collector * @param func * @param mapperForUnJoinedElements map the remaining elements in provided {@code Stream} to a Stream which will be appended to the end of the returned Stream. * @return */ @Beta @SequentialOnly @IntermediateOp public abstract Stream joinByRange(final Stream b, final BiPredicate predicate, final Collector collector, final BiFunction func, Function, Stream> mapperForUnJoinedElements); /** * Attaches a new stream with terminal action to consume the elements from upstream. * The Intermediate and terminate operations in the attached stream will be executed in a new Thread. * The new thread is started when the main stream(the returned stream or its downstream) receives the first element or is closed if there is no element pulled. * Elements from upstream pulled by the main stream will be put in a queue for the attached stream to consume. 
* After the main stream is finished, the attached stream will continue to pull remaining elements from upstream if needed. * The main stream and the attached stream run independently. Operations in one stream won't impact the elements or final result in another Stream. * But when the main stream is to close, it will wait to the attached stream to close before calling close actions. * So the attached stream may ends up earlier than the main stream, or at least no later than the main stream. * * @param consumerForNewStreamWithTerminalAction the consumer for the new stream with terminal action * @return the main stream with the attached subscriber * @see #addSubscriber(Throwables.Consumer, int, long, Executor) */ @Beta @SequentialOnly @IntermediateOp public Stream addSubscriber(final Throwables.Consumer, ? extends Exception> consumerForNewStreamWithTerminalAction) { return addSubscriber(consumerForNewStreamWithTerminalAction, DEFAULT_BUFFERED_SIZE_PER_ITERATOR, MAX_WAIT_TIME_FOR_QUEUE_OFFER_FOR_ADD_SUBSCRIBER, Stream.executor()); } /** * Attaches a new stream with terminal action to consume the elements from upstream. * The Intermediate and terminate operations in the attached stream will be executed in a new Thread. * The new thread is started when the main stream(the returned stream or its downstream) receives the first element or is closed if there is no element pulled. * Elements from upstream pulled by the main stream will be put in a queue for the attached stream to consume. * After the main stream is finished, the attached stream will continue to pull remaining elements from upstream if needed. * The main stream and the attached stream run independently. Operations in one stream won't impact the elements or final result in another Stream. * But when the main stream is to close, it will wait to the attached stream to close before calling close actions. * So the attached stream may ends up earlier than the main stream, or at least no later than the main stream. * * To get the return value of the attached stream, An output parameter can be used. for example: *
     * <pre>
     *     final Holder<String> resultHolder = new Holder<>();
     *     thisStream.addSubscriber(newStream -> resultHolder.set(newStream.filter(...).map(...).join(",")))...;
     * </pre>
     *
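     * <p>Another illustrative sketch (not part of the original documentation; {@code lines} is a hypothetical
     * source collection): the attached stream counts the elements on its own thread while the main stream keeps
     * its own pipeline:</p>
     * <pre>{@code
     * final Holder<Long> lineCount = new Holder<>();
     *
     * Stream.of(lines)
     *       .addSubscriber(s -> lineCount.set(s.count()))   // consumed from the queue on a separate thread
     *       .filter(line -> !line.isEmpty())
     *       .forEach(System.out::println);                  // closing the main stream waits for the subscriber
     * }</pre>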
* * @param consumerForNewStreamWithTerminalAction the consumer for the new stream with terminal action * @param queueSize the size of the queue. Default value is 64. * @param maxWaitForAddingElementToQuery max wait time to add next element to queue for subscriber stream to consumer. Default value is 30000 (unit is milliseconds). * If the next element can't be added to queue after waiting for period, exception will be thrown in subscriber stream. * @param executor the executor to run the attached stream * @return the main stream with the attached subscriber * @throws IllegalStateException * @throws IllegalArgumentException */ @Beta @SequentialOnly @IntermediateOp public Stream addSubscriber(final Throwables.Consumer, ? extends Exception> consumerForNewStreamWithTerminalAction, final int queueSize, final long maxWaitForAddingElementToQuery, final Executor executor) throws IllegalStateException, IllegalArgumentException { assertNotClosed(); checkArgNotNull(consumerForNewStreamWithTerminalAction, cs.consumerForNewStreamWithTerminalAction); checkArgPositive(queueSize, cs.queueSize); checkArgPositive(maxWaitForAddingElementToQuery, cs.maxWaitForAddingElementToQuery); checkArgNotNull(executor, cs.executor); return addSubscriberForAll(consumerForNewStreamWithTerminalAction, queueSize, maxWaitForAddingElementToQuery, executor); } private Stream addSubscriberForAll(final Throwables.Consumer, ? extends Exception> consumerForNewStreamWithTerminalAction, final int queueSize, final long maxWaitForAddingElementToQuery, final Executor executor) { final BlockingQueue queue = new ArrayBlockingQueue<>(queueSize <= 0 ? DEFAULT_BUFFERED_SIZE_PER_ITERATOR : queueSize); final ObjIterator elements = iteratorEx(); final T none = (T) NONE; final MutableBoolean isMainStreamCompleted = MutableBoolean.of(false); final MutableBoolean isSubscriberStreamCompleted = MutableBoolean.of(false); final MutableBoolean isFailedToOfferToQueue = MutableBoolean.of(false); final MutableInt nextCallCount = MutableInt.of(0); // it should end with 0 if there is no exception happening during hasNext()/next() call. final ObjIteratorEx iterForSubscriberStream = new ObjIteratorEx<>() { //NOSONAR private final MutableBoolean isExceptionThrown = MutableBoolean.of(false); private T next = null; @Override public boolean hasNext() { if ((next == null) && ((isFailedToOfferToQueue.isFalse() && isMainStreamCompleted.isFalse()) || queue.size() > 0)) { try { do { next = queue.poll(MAX_WAIT_TIME_FOR_QUEUE_POLL, TimeUnit.MILLISECONDS); checkNextCallCountInMainStream(nextCallCount, isMainStreamCompleted, isExceptionThrown); } while (next == null && ((isFailedToOfferToQueue.isFalse() && isMainStreamCompleted.isFalse()) || queue.size() > 0)); } catch (final InterruptedException e) { throw toRuntimeException(e); } } checkExceptionsForSubscriber(isFailedToOfferToQueue, nextCallCount, isMainStreamCompleted, queueSize, isExceptionThrown); return next != null || (isMainStreamCompleted.isTrue() && elements.hasNext()); } @Override public T next() { if (!hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } final T ret = next != null ? (next == none ? 
null : next) : elements.next(); next = null; return ret; } @Override public void close() { isSubscriberStreamCompleted.setTrue(); queue.clear(); checkExceptionsForSubscriber(isFailedToOfferToQueue, nextCallCount, isMainStreamCompleted, queueSize, isExceptionThrown); } }; final ObjIteratorEx iter = new ObjIteratorEx<>() { //NOSONAR private boolean isNewStreamStarted = false; private ContinuableFuture futureForNewStream = null; private boolean hasNext = false; private T next = null; @Override public boolean hasNext() { if (!hasNext) { nextCallCount.increment(); try { hasNext = elements.hasNext(); nextCallCount.decrement(); } finally { if (nextCallCount.value() > 0) { // exception happened. set nextCallCount to 2. nextCallCount.increment(); } } } return hasNext; } @Override public T next() { nextCallCount.increment(); try { next = elements.next(); nextCallCount.decrement(); } finally { hasNext = false; if (nextCallCount.value() > 0) { // exception happened. set nextCallCount to 2. nextCallCount.increment(); } } if (!isNewStreamStarted) { startNewStream(); } if (isFailedToOfferToQueue.isFalse() && isSubscriberStreamCompleted.isFalse()) { try { if (!queue.offer(next == null ? none : next, maxWaitForAddingElementToQuery, TimeUnit.MILLISECONDS) && isSubscriberStreamCompleted.isFalse()) { isFailedToOfferToQueue.setTrue(); } } catch (final Exception e) { // ExceptionUtil.toRuntimeException(e, true); // This may impact main stream. Error happened in attached stream should not impact main stream. logger.error(e, "Failed to add element to queue for subscriber stream to consume"); isFailedToOfferToQueue.setTrue(); } } return next; } @Override public void close() { isMainStreamCompleted.setTrue(); if (!isNewStreamStarted) { startNewStream(); } if (futureForNewStream != null) { try { futureForNewStream.get(); } catch (ExecutionException | InterruptedException e) { // ExceptionUtil.toRuntimeException(e, true); // This may impact main stream. Error happened in attached stream should not impact main stream. logger.error(e, "Error happened in waiting for attached stream to close"); } } } private void startNewStream() { isNewStreamStarted = true; if (executor == null) { futureForNewStream = Stream.of(iterForSubscriberStream) .onClose(iterForSubscriberStream::close) .asyncRun(consumerForNewStreamWithTerminalAction); } else { futureForNewStream = Stream.of(iterForSubscriberStream) .onClose(iterForSubscriberStream::close) .asyncRun(consumerForNewStreamWithTerminalAction, executor); } } }; return newStream(iter, sorted, cmp, mergeCloseHandlers(iter::close, closeHandlers, true)); } /** * Attaches a new stream with terminal action to consume the elements filtered out by the specified {@code predicate}. * The Intermediate and terminate operations in the attached stream will be executed in a new Thread. * The new thread is started when the main stream(the returned stream or its downstream) receives the first element or is closed if there is no element pulled. * Elements from upstream pulled by the main stream will be put in a queue for the attached stream to consume. * After the main stream is finished, the attached stream will continue to pull remaining elements from upstream if needed. * The main stream and the attached stream run independently. Operations in one stream won't impact the elements or final result in another Stream. * But when the main stream is to close, it will wait to the attached stream to close before calling close actions. 
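     * <p>An illustrative sketch (not part of the original documentation; {@code Record}, {@code isValid()} and
     * {@code normalize()} are hypothetical): the elements rejected by the predicate are routed to the attached
     * stream while the main stream continues with the accepted ones:</p>
     * <pre>{@code
     * Stream.of(records)
     *       .filterWhileAddSubscriber(Record::isValid,
     *               rejected -> rejected.forEach(r -> System.err.println("dropped: " + r)))
     *       .map(Record::normalize)
     *       .toList();
     * }</pre>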
* So the attached stream may ends up earlier than the main stream, or at least no later than the main stream. * * @param predicate the predicate to test elements * @param consumerForNewStreamWithTerminalAction the consumer for the new stream with terminal action * @return the main stream with the attached subscriber * @see #filterWhileAddSubscriber(Predicate, Throwables.Consumer, int, long, Executor) * @see #addSubscriber(Throwables.Consumer, int, long, Executor) * @see #filter(Predicate, Consumer) */ @Beta @SequentialOnly @IntermediateOp public Stream filterWhileAddSubscriber(final Predicate predicate, final Throwables.Consumer, ? extends Exception> consumerForNewStreamWithTerminalAction) { return filterWhileAddSubscriber(predicate, consumerForNewStreamWithTerminalAction, DEFAULT_BUFFERED_SIZE_PER_ITERATOR, MAX_WAIT_TIME_FOR_QUEUE_OFFER_FOR_ADD_SUBSCRIBER, Stream.executor()); } /** * Attaches a new stream with terminal action to consume the elements filtered out by the specified {@code predicate}. * The Intermediate and terminate operations in the attached stream will be executed in a new Thread. * The new thread is started when the main stream(the returned stream or its downstream) receives the first element or is closed if there is no element pulled. * Elements from upstream pulled by the main stream will be put in a queue for the attached stream to consume. * After the main stream is finished, the attached stream will continue to pull remaining elements from upstream if needed. * The main stream and the attached stream run independently. Operations in one stream won't impact the elements or final result in another Stream. * But when the main stream is to close, it will wait to the attached stream to close before calling close actions. * So the attached stream may ends up earlier than the main stream, or at least no later than the main stream. * * @param predicate the predicate to test elements * @param consumerForNewStreamWithTerminalAction the consumer for the new stream with terminal action * @param queueSize the size of the queue. Default value is 64. * @param maxWaitForAddingElementToQuery max wait time to add next element to queue for subscriber stream to consumer. Default value is 30000 (unit is milliseconds). * If the next element can't be added to queue after waiting for period, exception will be thrown in subscriber stream. * @param executor the executor to run the attached stream * @return the main stream with the attached subscriber * @throws IllegalStateException * @throws IllegalArgumentException * @see #addSubscriber(Throwables.Consumer, int, long, Executor) * @see #filter(Predicate, Consumer) */ @Beta @SequentialOnly @IntermediateOp public Stream filterWhileAddSubscriber(final Predicate predicate, final Throwables.Consumer, ? 
extends Exception> consumerForNewStreamWithTerminalAction, final int queueSize, final long maxWaitForAddingElementToQuery, final Executor executor) throws IllegalStateException, IllegalArgumentException { assertNotClosed(); checkArgNotNull(predicate, cs.predicate); checkArgNotNull(consumerForNewStreamWithTerminalAction, cs.consumerForNewStreamWithTerminalAction); checkArgPositive(queueSize, cs.queueSize); checkArgPositive(maxWaitForAddingElementToQuery, cs.maxWaitForAddingElementToQuery); checkArgNotNull(executor, cs.executor); return addSubscriberForFilter(predicate, consumerForNewStreamWithTerminalAction, queueSize, maxWaitForAddingElementToQuery, executor); } private Stream addSubscriberForFilter(final Predicate predicate, final Throwables.Consumer, ? extends Exception> consumerForNewStreamWithTerminalAction, final int queueSize, final long maxWaitForAddingElementToQuery, final Executor executor) { final BlockingQueue queue = new ArrayBlockingQueue<>(queueSize <= 0 ? DEFAULT_BUFFERED_SIZE_PER_ITERATOR : queueSize); final ObjIterator elements = iteratorEx(); final T none = (T) NONE; final MutableBoolean isMainStreamCompleted = MutableBoolean.of(false); final MutableBoolean isSubscriberStreamCompleted = MutableBoolean.of(false); final MutableBoolean isFailedToOfferToQueue = MutableBoolean.of(false); final MutableInt nextCallCount = MutableInt.of(0); // it should end with 0 if there is no exception happening during hasNext()/next() call. final ObjIteratorEx iterForSubscriberStream = new ObjIteratorEx<>() { //NOSONAR private final MutableBoolean isExceptionThrown = MutableBoolean.of(false); private T next = null; @Override public boolean hasNext() { if ((next == null) && ((isFailedToOfferToQueue.isFalse() && isMainStreamCompleted.isFalse()) || queue.size() > 0)) { try { do { next = queue.poll(MAX_WAIT_TIME_FOR_QUEUE_POLL, TimeUnit.MILLISECONDS); checkNextCallCountInMainStream(nextCallCount, isMainStreamCompleted, isExceptionThrown); } while (next == null && ((isFailedToOfferToQueue.isFalse() && isMainStreamCompleted.isFalse()) || queue.size() > 0)); } catch (final InterruptedException e) { throw toRuntimeException(e); } } if (next == null && isFailedToOfferToQueue.isFalse() && isMainStreamCompleted.isTrue()) { while (elements.hasNext()) { next = elements.next(); if (!predicate.test(next)) { next = next == null ? none : next; break; } else { next = null; } } } checkExceptionsForSubscriber(isFailedToOfferToQueue, nextCallCount, isMainStreamCompleted, queueSize, isExceptionThrown); return next != null; } @Override public T next() { if (!hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } final T ret = next != null ? (next == none ? null : next) : elements.next(); next = null; return ret; } @Override public void close() { isSubscriberStreamCompleted.setTrue(); queue.clear(); checkExceptionsForSubscriber(isFailedToOfferToQueue, nextCallCount, isMainStreamCompleted, queueSize, isExceptionThrown); } }; final ObjIteratorEx iter = new ObjIteratorEx<>() { //NOSONAR private boolean isNewStreamStarted = false; private ContinuableFuture futureForNewStream = null; private boolean hasNext = false; private T next = null; @Override public boolean hasNext() { if (!hasNext) { nextCallCount.increment(); try { while (elements.hasNext()) { next = elements.next(); if (predicate.test(next)) { hasNext = true; break; } else { if (!isNewStreamStarted) { startNewStream(); } if (isFailedToOfferToQueue.isFalse() && isSubscriberStreamCompleted.isFalse()) { try { if (!queue.offer(next == null ? 
none : next, maxWaitForAddingElementToQuery, TimeUnit.MILLISECONDS) && isSubscriberStreamCompleted.isFalse()) { isFailedToOfferToQueue.setTrue(); } } catch (final Exception e) { // ExceptionUtil.toRuntimeException(e, true); // This may impact main stream. Error happened in attached stream should not impact main stream. logger.error(e, "Failed to add element to queue for subscriber stream to consume"); isFailedToOfferToQueue.setTrue(); } } } } nextCallCount.decrement(); } finally { if (nextCallCount.value() > 0) { // exception happened. set nextCallCount to 2. nextCallCount.increment(); } } } return hasNext; } @Override public T next() { if (!hasNext && !hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } hasNext = false; return next; } @Override public void close() { isMainStreamCompleted.setTrue(); if (!isNewStreamStarted) { startNewStream(); } if (futureForNewStream != null) { try { futureForNewStream.get(); } catch (ExecutionException | InterruptedException e) { // ExceptionUtil.toRuntimeException(e, true); // This may impact main stream. Error happened in attached stream should not impact main stream. logger.error(e, "Error happened in waiting for attached stream to close"); } } } private void startNewStream() { isNewStreamStarted = true; if (executor == null) { futureForNewStream = Stream.of(iterForSubscriberStream) .onClose(iterForSubscriberStream::close) .asyncRun(consumerForNewStreamWithTerminalAction); } else { futureForNewStream = Stream.of(iterForSubscriberStream) .onClose(iterForSubscriberStream::close) .asyncRun(consumerForNewStreamWithTerminalAction, executor); } } }; return newStream(iter, sorted, cmp, mergeCloseHandlers(iter::close, closeHandlers, true)); } /** * Attaches a new stream with terminal action to consume the elements not token by the specified {@code predicate}. * The Intermediate and terminate operations in the attached stream will be executed in a new Thread. * The new thread is started when the main stream(the returned stream or its downstream) receives the first element or is closed if there is no element pulled. * Elements from upstream pulled by the main stream will be put in a queue for the attached stream to consume. * After the main stream is finished, the attached stream will continue to pull remaining elements from upstream if needed. * The main stream and the attached stream run independently. Operations in one stream won't impact the elements or final result in another Stream. * But when the main stream is to close, it will wait to the attached stream to close before calling close actions. * So the attached stream may ends up earlier than the main stream, or at least no later than the main stream. * * @param predicate the predicate to test elements * @param consumerForNewStreamWithTerminalAction the consumer for the new stream with terminal action * @return the main stream with the attached subscriber * @see #takeWhileAddSubscriber(Predicate, Throwables.Consumer, Executor) * @see #addSubscriber(Throwables.Consumer, int, long, Executor) */ @Beta @SequentialOnly @IntermediateOp public Stream takeWhileAddSubscriber(final Predicate predicate, final Throwables.Consumer, ? extends Exception> consumerForNewStreamWithTerminalAction) { return takeWhileAddSubscriber(predicate, consumerForNewStreamWithTerminalAction, Stream.executor()); } /** * Attaches a new stream with terminal action to consume the elements not token by the specified {@code predicate}. 
* The Intermediate and terminate operations in the attached stream will be executed in a new Thread. * The new thread is started when the main stream(the returned stream or its downstream) receives the first element or is closed if there is no element pulled. * Elements from upstream pulled by the main stream will be put in a queue for the attached stream to consume. * After the main stream is finished, the attached stream will continue to pull remaining elements from upstream if needed. * The main stream and the attached stream run independently. Operations in one stream won't impact the elements or final result in another Stream. * But when the main stream is to close, it will wait to the attached stream to close before calling close actions. * So the attached stream may ends up earlier than the main stream, or at least no later than the main stream. * * @param predicate the predicate to test elements * @param consumerForNewStreamWithTerminalAction the consumer for the new stream with terminal action * @param executor the executor to run the attached stream * @return the main stream with the attached subscriber * @throws IllegalArgumentException * @see #addSubscriber(Throwables.Consumer, int, long, Executor) */ @Beta @SequentialOnly @IntermediateOp public Stream takeWhileAddSubscriber(final Predicate predicate, final Throwables.Consumer, ? extends Exception> consumerForNewStreamWithTerminalAction, final Executor executor) throws IllegalArgumentException { assertNotClosed(); checkArgNotNull(predicate, cs.predicate); checkArgNotNull(consumerForNewStreamWithTerminalAction, cs.consumerForNewStreamWithTerminalAction); checkArgNotNull(executor, cs.executor); // There will only one element will be put into queue at most at the begin for take while. Queue won't be used after the first element. return addSubscriberForTakeWhile(predicate, consumerForNewStreamWithTerminalAction, DEFAULT_BUFFERED_SIZE_PER_ITERATOR, MAX_WAIT_TIME_FOR_QUEUE_OFFER_FOR_ADD_SUBSCRIBER, executor); } private Stream addSubscriberForTakeWhile(final Predicate predicate, final Throwables.Consumer, ? extends Exception> consumerForNewStreamWithTerminalAction, final int queueSize, final long maxWaitForAddingElementToQuery, final Executor executor) { final BlockingQueue queue = new ArrayBlockingQueue<>(queueSize <= 0 ? DEFAULT_BUFFERED_SIZE_PER_ITERATOR : queueSize); final ObjIterator elements = iteratorEx(); final T none = (T) NONE; final MutableBoolean isMainStreamCompleted = MutableBoolean.of(false); final MutableBoolean isTakeCompletedInMainStream = MutableBoolean.of(false); final MutableBoolean isSubscriberStreamCompleted = MutableBoolean.of(false); final MutableBoolean isFailedToOfferToQueue = MutableBoolean.of(false); final MutableInt nextCallCount = MutableInt.of(0); // it should end with 0 if there is no exception happening during hasNext()/next() call. 
final ObjIteratorEx iterForSubscriberStream = new ObjIteratorEx<>() { //NOSONAR private final MutableBoolean isExceptionThrown = MutableBoolean.of(false); private T next = null; @Override public boolean hasNext() { if (next == null && ((isFailedToOfferToQueue.isFalse() && isTakeCompletedInMainStream.isFalse() && isMainStreamCompleted.isFalse()) || queue.size() > 0)) { try { do { next = queue.poll(MAX_WAIT_TIME_FOR_QUEUE_POLL, TimeUnit.MILLISECONDS); checkNextCallCountInMainStream(nextCallCount, isMainStreamCompleted, isExceptionThrown); } while (next == null && ((isFailedToOfferToQueue.isFalse() && isTakeCompletedInMainStream.isFalse() && isMainStreamCompleted.isFalse()) || queue.size() > 0)); } catch (final InterruptedException e) { throw toRuntimeException(e); } } if (next == null && isFailedToOfferToQueue.isFalse() && isTakeCompletedInMainStream.isFalse()) { // it also means isMainStreamCompleted.isTrue() while (elements.hasNext()) { next = elements.next(); if (!predicate.test(next)) { next = next == null ? none : next; break; } else { next = null; } } isTakeCompletedInMainStream.setTrue(); } checkExceptionsForSubscriber(isFailedToOfferToQueue, nextCallCount, isMainStreamCompleted, queueSize, isExceptionThrown); return next != null || elements.hasNext(); } @Override public T next() { if (!hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } final T ret = next != null ? (next == none ? null : next) : elements.next(); next = null; return ret; } @Override public void close() { isSubscriberStreamCompleted.setTrue(); queue.clear(); checkExceptionsForSubscriber(isFailedToOfferToQueue, nextCallCount, isMainStreamCompleted, queueSize, isExceptionThrown); } }; final ObjIteratorEx iter = new ObjIteratorEx<>() { //NOSONAR private boolean isNewStreamStarted = false; private ContinuableFuture futureForNewStream = null; private boolean hasMore = true; private boolean hasNext = false; private T next = null; @Override public boolean hasNext() { if (!hasNext && hasMore) { nextCallCount.increment(); try { if (elements.hasNext()) { next = elements.next(); if (predicate.test(next)) { hasNext = true; } else { hasMore = false; if (!isNewStreamStarted) { startNewStream(); } if (isFailedToOfferToQueue.isFalse() && isSubscriberStreamCompleted.isFalse()) { try { if (!queue.offer(next == null ? none : next, maxWaitForAddingElementToQuery, TimeUnit.MILLISECONDS) && isSubscriberStreamCompleted.isFalse()) { isFailedToOfferToQueue.setTrue(); } } catch (final Exception e) { // ExceptionUtil.toRuntimeException(e, true); // This may impact main stream. Error happened in attached stream should not impact main stream. logger.error(e, "Failed to add element to queue for subscriber stream to consume"); isFailedToOfferToQueue.setTrue(); } } isTakeCompletedInMainStream.setTrue(); } } nextCallCount.decrement(); } finally { if (nextCallCount.value() > 0) { // exception happened. set nextCallCount to 2. nextCallCount.increment(); } } } return hasNext; } @Override public T next() { if (!hasNext && !hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } hasNext = false; return next; } @Override public void close() { isMainStreamCompleted.setTrue(); if (!isNewStreamStarted) { startNewStream(); } if (futureForNewStream != null) { try { futureForNewStream.get(); } catch (ExecutionException | InterruptedException e) { // ExceptionUtil.toRuntimeException(e, true); // This may impact main stream. Error happened in attached stream should not impact main stream. 
logger.error(e, "Error happened in waiting for attached stream to close"); } } } private void startNewStream() { isNewStreamStarted = true; if (executor == null) { futureForNewStream = Stream.of(iterForSubscriberStream) .onClose(iterForSubscriberStream::close) .asyncRun(consumerForNewStreamWithTerminalAction); } else { futureForNewStream = Stream.of(iterForSubscriberStream) .onClose(iterForSubscriberStream::close) .asyncRun(consumerForNewStreamWithTerminalAction, executor); } } }; return newStream(iter, sorted, cmp, mergeCloseHandlers(iter::close, closeHandlers, true)); } /** * Attaches a new stream with terminal action to consume the elements dropped by the specified {@code predicate}. * The Intermediate and terminate operations in the attached stream will be executed in a new Thread. * The new thread is started when the main stream(the returned stream or its downstream) receives the first element or is closed if there is no element pulled. * Elements from upstream pulled by the main stream will be put in a queue for the attached stream to consume. * After the main stream is finished, the attached stream will continue to pull remaining elements from upstream if needed. * The main stream and the attached stream run independently. Operations in one stream won't impact the elements or final result in another Stream. * But when the main stream is to close, it will wait to the attached stream to close before calling close actions. * So the attached stream may ends up earlier than the main stream, or at least no later than the main stream. * * @param predicate the predicate to test elements * @param consumerForNewStreamWithTerminalAction the consumer for the new stream with terminal action * @return the main stream with the attached subscriber * @see #dropWhileAddSubscriber(Predicate, Throwables.Consumer, int, long, Executor) * @see #addSubscriber(Throwables.Consumer, int, long, Executor) * @see #dropWhile(Predicate, Consumer) */ @Beta @SequentialOnly @IntermediateOp public Stream dropWhileAddSubscriber(final Predicate predicate, final Throwables.Consumer, ? extends Exception> consumerForNewStreamWithTerminalAction) { return dropWhileAddSubscriber(predicate, consumerForNewStreamWithTerminalAction, DEFAULT_BUFFERED_SIZE_PER_ITERATOR, MAX_WAIT_TIME_FOR_QUEUE_OFFER_FOR_ADD_SUBSCRIBER, Stream.executor()); } /** * Attaches a new stream with terminal action to consume the elements dropped by the specified {@code predicate}. * The Intermediate and terminate operations in the attached stream will be executed in a new Thread. * The new thread is started when the main stream(the returned stream or its downstream) receives the first element or is closed if there is no element pulled. * Elements from upstream pulled by the main stream will be put in a queue for the attached stream to consume. * After the main stream is finished, the attached stream will continue to pull remaining elements from upstream if needed. * The main stream and the attached stream run independently. Operations in one stream won't impact the elements or final result in another Stream. * But when the main stream is to close, it will wait to the attached stream to close before calling close actions. * So the attached stream may ends up earlier than the main stream, or at least no later than the main stream. * * @param predicate the predicate to test elements * @param consumerForNewStreamWithTerminalAction the consumer for the new stream with terminal action * @param queueSize the size of the queue. Default value is 64. 
* @param maxWaitForAddingElementToQuery max wait time to add next element to queue for subscriber stream to consumer. Default value is 30000 (unit is milliseconds). * If the next element can't be added to queue after waiting for period, exception will be thrown in subscriber stream. * @param executor the executor to run the attached stream * @return the main stream with the attached subscriber * @throws IllegalStateException * @throws IllegalArgumentException * @see #addSubscriber(Throwables.Consumer, int, long, Executor) * @see #dropWhile(Predicate, Consumer) */ @Beta @SequentialOnly @IntermediateOp public Stream dropWhileAddSubscriber(final Predicate predicate, final Throwables.Consumer, ? extends Exception> consumerForNewStreamWithTerminalAction, final int queueSize, final long maxWaitForAddingElementToQuery, final Executor executor) throws IllegalStateException, IllegalArgumentException { assertNotClosed(); checkArgNotNull(predicate, cs.predicate); checkArgNotNull(consumerForNewStreamWithTerminalAction, cs.consumerForNewStreamWithTerminalAction); checkArgPositive(queueSize, cs.queueSize); checkArgPositive(maxWaitForAddingElementToQuery, cs.maxWaitForAddingElementToQuery); checkArgNotNull(executor, cs.executor); return addSubscriberForDropWhile(predicate, consumerForNewStreamWithTerminalAction, queueSize, maxWaitForAddingElementToQuery, executor); } private Stream addSubscriberForDropWhile(final Predicate predicate, final Throwables.Consumer, ? extends Exception> consumerForNewStreamWithTerminalAction, final int queueSize, final long maxWaitForAddingElementToQuery, final Executor executor) { final BlockingQueue queue = new ArrayBlockingQueue<>(queueSize <= 0 ? DEFAULT_BUFFERED_SIZE_PER_ITERATOR : queueSize); final ObjIteratorEx elements = iteratorEx(); final T none = (T) NONE; final MutableBoolean isMainStreamCompleted = MutableBoolean.of(false); final MutableBoolean isDropCompletedInMainStream = MutableBoolean.of(false); final MutableBoolean isSubscriberStreamCompleted = MutableBoolean.of(false); final MutableBoolean isFailedToOfferToQueue = MutableBoolean.of(false); final MutableInt nextCallCount = MutableInt.of(0); // it should end with 0 if there is no exception happening during hasNext()/next() call. final ObjIteratorEx iterForSubscriberStream = new ObjIteratorEx<>() { //NOSONAR private final MutableBoolean isExceptionThrown = MutableBoolean.of(false); private T next = null; @Override public boolean hasNext() { if (next == null && ((isFailedToOfferToQueue.isFalse() && isDropCompletedInMainStream.isFalse() && isMainStreamCompleted.isFalse()) || queue.size() > 0)) { try { do { next = queue.poll(MAX_WAIT_TIME_FOR_QUEUE_POLL, TimeUnit.MILLISECONDS); checkNextCallCountInMainStream(nextCallCount, isMainStreamCompleted, isExceptionThrown); } while (next == null && ((isFailedToOfferToQueue.isFalse() && isDropCompletedInMainStream.isFalse() && isMainStreamCompleted.isFalse()) || queue.size() > 0)); } catch (final InterruptedException e) { throw toRuntimeException(e); } } if ((next == null && isFailedToOfferToQueue.isFalse() && isDropCompletedInMainStream.isFalse()) && elements.hasNext()) { next = elements.next(); if (predicate.test(next)) { next = next == null ? 
none : next; } else { next = null; isDropCompletedInMainStream.setTrue(); } } checkExceptionsForSubscriber(isFailedToOfferToQueue, nextCallCount, isMainStreamCompleted, queueSize, isExceptionThrown); return next != null; } @Override public T next() { if (!hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } final T ret = next != null ? (next == none ? null : next) : elements.next(); next = null; return ret; } @Override public void close() { isSubscriberStreamCompleted.setTrue(); queue.clear(); checkExceptionsForSubscriber(isFailedToOfferToQueue, nextCallCount, isMainStreamCompleted, queueSize, isExceptionThrown); } }; final ObjIteratorEx iter = new ObjIteratorEx<>() { //NOSONAR private boolean isNewStreamStarted = false; private ContinuableFuture futureForNewStream = null; private boolean hasNext = false; private T next = null; private boolean dropped = false; @Override public boolean hasNext() { if (!hasNext) { if (!dropped) { dropped = true; nextCallCount.increment(); try { while (elements.hasNext()) { next = elements.next(); if (!predicate.test(next)) { isDropCompletedInMainStream.setTrue(); hasNext = true; break; } else { if (!isNewStreamStarted) { startNewStream(); } if (isFailedToOfferToQueue.isFalse() && isSubscriberStreamCompleted.isFalse()) { try { if (!queue.offer(next == null ? none : next, maxWaitForAddingElementToQuery, TimeUnit.MILLISECONDS) && isSubscriberStreamCompleted.isFalse()) { isFailedToOfferToQueue.setTrue(); } } catch (final Exception e) { // ExceptionUtil.toRuntimeException(e, true); // This may impact main stream. Error happened in attached stream should not impact main stream. logger.error(e, "Failed to add element to queue for subscriber stream to consume"); isFailedToOfferToQueue.setTrue(); } } } } nextCallCount.decrement(); } finally { if (nextCallCount.value() > 0) { // exception happened. set nextCallCount to 2. nextCallCount.increment(); } } } else if (elements.hasNext()) { next = elements.next(); hasNext = true; } } return hasNext; } @Override public T next() { if (!hasNext && !hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } hasNext = false; return next; } @Override public void close() { isMainStreamCompleted.setTrue(); if (!isNewStreamStarted) { startNewStream(); } if (futureForNewStream != null) { try { futureForNewStream.get(); } catch (ExecutionException | InterruptedException e) { // ExceptionUtil.toRuntimeException(e, true); // This may impact main stream. Error happened in attached stream should not impact main stream. 
logger.error(e, "Error happened in waiting for attached stream to close"); } } } private void startNewStream() { isNewStreamStarted = true; if (executor == null) { futureForNewStream = Stream.of(iterForSubscriberStream) .onClose(iterForSubscriberStream::close) .asyncRun(consumerForNewStreamWithTerminalAction); } else { futureForNewStream = Stream.of(iterForSubscriberStream) .onClose(iterForSubscriberStream::close) .asyncRun(consumerForNewStreamWithTerminalAction, executor); } } }; return newStream(iter, sorted, cmp, mergeCloseHandlers(iter::close, closeHandlers, true)); } private static void checkNextCallCountInMainStream(final MutableInt nextCallCount, final MutableBoolean isMainStreamCompleted, final MutableBoolean isExceptionThrown) { if (nextCallCount.value() > 1 || (isMainStreamCompleted.isTrue() && nextCallCount.value() > 0)) { isExceptionThrown.setTrue(); throw new IllegalStateException("Exception happened in calling hasNext()/next() in main stream"); } } private static void checkExceptionsForSubscriber(final MutableBoolean isFailedToOfferToQueue, final MutableInt nextCallCount, final MutableBoolean isMainStreamCompleted, final int queueSize, final MutableBoolean isExceptionThrown) { if (isExceptionThrown.isFalse()) { if (isFailedToOfferToQueue.isTrue()) { isExceptionThrown.setTrue(); throw new IllegalStateException( "Failed to add element to queue(size=" + queueSize + ") because it's full or exception happened. Elements may be missed"); } checkNextCallCountInMainStream(nextCallCount, isMainStreamCompleted, isExceptionThrown); } } /** * Executes the provided terminal operation asynchronously on this Stream. * The terminal operation is a function that consumes this Stream and may throw an exception. * The result of the operation is wrapped in a ContinuableFuture. * * @param terminalAction the terminal operation to be executed on this Stream * @return a ContinuableFuture representing the result of the asynchronous computation * @throws IllegalArgumentException if terminalAction is null */ @Beta @TerminalOp public ContinuableFuture asyncRun(final Throwables.Consumer, ? extends Exception> terminalAction) throws IllegalArgumentException { checkArgNotNull(terminalAction, cs.terminalAction); return ContinuableFuture.run(() -> terminalAction.accept(Stream.this)); } /** * Executes the provided terminal operation asynchronously on this Stream using the specified Executor. * The terminal operation is a function that consumes this Stream and may throw an exception. * The result of the operation is wrapped in a ContinuableFuture. * * @param terminalAction the terminal operation to be executed on this Stream * @param executor the Executor to use for asynchronous execution * @return a ContinuableFuture representing the result of the asynchronous computation * @throws IllegalArgumentException if terminalAction is null */ @Beta @TerminalOp public ContinuableFuture asyncRun(final Throwables.Consumer, ? extends Exception> terminalAction, final Executor executor) throws IllegalArgumentException { checkArgNotNull(terminalAction, cs.terminalAction); checkArgNotNull(executor, cs.executor); return ContinuableFuture.run(() -> terminalAction.accept(Stream.this), executor); } /** * Executes the provided terminal operation asynchronously on this Stream. * The terminal operation is a function that consumes this Stream and may throw an exception. * The result of the operation is wrapped in a ContinuableFuture. 
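     * <p>An illustrative sketch (not part of the original documentation; {@code lines} is a hypothetical source
     * collection):</p>
     * <pre>{@code
     * // fire-and-forget: run the whole pipeline on another thread
     * ContinuableFuture<Void> done = Stream.of(lines).asyncRun(s -> s.forEach(System.out::println));
     *
     * // or compute a value asynchronously
     * ContinuableFuture<Long> futureCount = Stream.of(lines).filter(l -> !l.isEmpty()).asyncCall(s -> s.count());
     * long count = futureCount.get();   // blocks; get() may throw InterruptedException/ExecutionException
     * }</pre>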
* * @param the type of the result of the terminal operation * @param terminalAction the terminal operation to be executed on this Stream * @return a ContinuableFuture representing the result of the asynchronous computation * @throws IllegalArgumentException if terminalAction is null */ @Beta @TerminalOp public ContinuableFuture asyncCall(final Throwables.Function, R, ? extends Exception> terminalAction) throws IllegalArgumentException { checkArgNotNull(terminalAction, cs.terminalAction); return ContinuableFuture.call(() -> terminalAction.apply(Stream.this)); } /** * Executes the provided terminal operation asynchronously on this Stream using the specified Executor. * The terminal operation is a function that consumes this Stream and may throw an exception. * The result of the operation is wrapped in a ContinuableFuture. * * @param the type of the result of the terminal operation * @param terminalAction the terminal operation to be executed on this Stream * @param executor the Executor to use for asynchronous execution * @return a ContinuableFuture representing the result of the asynchronous computation * @throws IllegalArgumentException if terminalAction is null */ @Beta @TerminalOp public ContinuableFuture asyncCall(final Throwables.Function, R, ? extends Exception> terminalAction, final Executor executor) throws IllegalArgumentException { checkArgNotNull(terminalAction, cs.terminalAction); checkArgNotNull(executor, cs.executor); return ContinuableFuture.call(() -> terminalAction.apply(Stream.this), executor); } // @SuppressWarnings("rawtypes") // private static final Stream EMPTY_STREAM = new ArrayStream<>(N.EMPTY_OBJECT_ARRAY, true, NATURAL_COMPARATOR, null); /** * Returns an empty Stream. * * @param the type of the elements in the Stream * @return an empty Stream */ public static Stream empty() { //noinspection rawtypes return new ArrayStream<>(N.EMPTY_OBJECT_ARRAY, true, NATURAL_COMPARATOR, null); } /** * Returns a Stream that is lazily populated by an input supplier. * *
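     * <p>An illustrative sketch (not part of the original documentation; {@code loadUsersFromDb()} and {@code User}
     * are hypothetical): the supplier is only invoked when the returned stream is actually consumed:</p>
     * <pre>{@code
     * Stream<User> users = Stream.defer(() -> Stream.of(loadUsersFromDb()));   // nothing is loaded yet
     * users.filter(u -> u.isActive()).count();                                 // the supplier runs here
     * }</pre>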
* @implNote it's equivalent to: {@code Stream.just(supplier).flatMap(Supplier::get)}. * * @param the type of the stream elements * @param supplier the Supplier that generates the Stream * @return a Stream instance * @throws IllegalArgumentException if the supplier is null */ public static Stream defer(final Supplier> supplier) throws IllegalArgumentException { N.checkArgNotNull(supplier, cs.supplier); //noinspection resource return Stream.just(supplier).flatMap(Supplier::get); } /** * Creates a new Stream from the provided java.util.stream.Stream. * The returned Stream will be sequential or parallel based on the provided java.util.stream.Stream. * The provided stream will be closed once one of the terminal operations in the returned stream is executed or the returned stream is closed. * * @param the type of the elements in the Stream * @param stream the java.util.stream.Stream to convert * @return a new Stream containing the elements of the provided java.util.stream.Stream */ public static Stream from(final java.util.stream.Stream stream) { if (stream == null) { return empty(); } return of(new ObjIteratorEx() { private Iterator iter = null; @Override public boolean hasNext() { if (iter == null) { iter = stream.iterator(); } return iter.hasNext(); } @Override public T next() { if (iter == null) { iter = stream.iterator(); } return iter.next(); } @Override public long count() { return iter == null ? stream.count() : super.count(); } @Override public void advance(final long n) { if (iter == null) { iter = stream.skip(n).iterator(); } else { super.advance(n); } } @Override public Object[] toArray() { return iter == null ? stream.toArray() : super.toArray(); } @Override public A[] toArray(final A[] a) { return iter == null ? stream.toArray(value -> a) : super.toArray(a); } }).transform(s -> stream.isParallel() ? s.parallel() : s.sequential()).onClose(stream::close); } /** * Creates a stream containing a single element. * * @param the type of the element * @param e the element to be included in the stream * @return a stream containing the specified element */ public static Stream just(final T e) { return of(N.asArray(e)); } /** * Returns an empty {@code Stream} if the specified element is {@code null}. * Otherwise, returns a {@code Stream} containing the specified element. * * @param the type of the element * @param e the element to be included in the stream, or {@code null} * @return a stream containing the specified element, or an empty stream if the element is {@code null} */ public static Stream ofNullable(final T e) { return e == null ? Stream.empty() : of(e); } /** * Returns a stream containing the specified elements. * * @param the type of the elements * @param a the elements to be included in the stream * @return a stream containing the specified elements. If the specified array is empty, an empty stream is returned. */ @SafeVarargs public static Stream of(final T... a) { return N.isEmpty(a) ? (Stream) empty() : of(a, 0, a.length); } /** * Returns a stream containing the specified elements from the given array, * starting from the specified start index (inclusive) to the specified end index (exclusive). 
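     * <p>An illustrative sketch (not part of the original documentation):</p>
     * <pre>{@code
     * String[] words = { "a", "b", "c", "d" };
     *
     * Stream.of(words, 1, 3).join(", ");   // => "b, c"
     * }</pre>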
* * @param the type of the elements * @param a the array from which elements are to be included in the stream * @param startIndex the starting index (inclusive) of the array * @param endIndex the ending index (exclusive) of the array * @return a stream containing the specified elements from the array * @throws IndexOutOfBoundsException if the specified start index or end index is out of bounds */ public static Stream of(final T[] a, final int startIndex, final int endIndex) throws IndexOutOfBoundsException { N.checkFromToIndex(startIndex, endIndex, N.len(a)); return N.isEmpty(a) && (startIndex == 0 && endIndex == 0) ? (Stream) empty() : new ArrayStream<>(a, startIndex, endIndex); } /** * Returns a stream containing the specified elements from the given collection. * * @param the type of the elements * @param c the collection from which elements are to be included in the stream * @return a stream containing the specified elements from the collection */ public static Stream of(final Collection c) { return N.isEmpty(c) ? (Stream) empty() : of(c, 0, c.size()); } /** * Returns a stream containing the specified elements from the given collection, * starting from the specified start index (inclusive) to the specified end index (exclusive). * * @param the type of the elements * @param c the collection from which elements are to be included in the stream * @param startIndex the starting index (inclusive) of the collection * @param endIndex the ending index (exclusive) of the collection * @return a stream containing the specified elements from the collection * @throws IndexOutOfBoundsException if the specified start index or end index is out of bounds */ public static Stream of(final Collection c, final int startIndex, final int endIndex) throws IndexOutOfBoundsException { N.checkFromToIndex(startIndex, endIndex, N.size(c)); if (N.isEmpty(c) && (startIndex == 0 && endIndex == 0)) { return empty(); } // return new CollectionStream(c); // return new ArrayStream((T[]) c.toArray()); // faster if (isListElementDataFieldGettable && listElementDataField != null && c.getClass().equals(ArrayList.class)) { T[] a = null; try { a = (T[]) listElementDataField.get(c); } catch (final Throwable e) { // NOSONAR // ignore; isListElementDataFieldGettable = false; } if (a != null) { return of(a, startIndex, endIndex); } } if (startIndex == 0 && endIndex == c.size()) { // return (c.size() > 10 && (c.size() < 1000 || (c.size() < 100000 && c instanceof ArrayList))) ? streamOf((T[]) c.toArray()) : c.stream(); return of(ObjIteratorEx.of(c)); } else { return of(ObjIteratorEx.of(c), startIndex, endIndex); } } /** * Returns a stream containing the specified entries from the given map. * * @param the type of the keys * @param the type of the values * @param map the map from which entries are to be included in the stream * @return a stream containing the specified entries from the map */ public static Stream> of(final Map map) { if (N.isEmpty(map)) { return empty(); } @SuppressWarnings("rawtypes") final Map tmp = (Map) map; return Stream.of(tmp.entrySet()); } /** * Returns a stream containing the elements of the specified iterable. 
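     * <p>An illustrative sketch (not part of the original documentation) covering the collection/iterable and map
     * factories:</p>
     * <pre>{@code
     * List<String> names = List.of("ann", "bob");
     * Stream.of(names).map(String::toUpperCase).join(", ");       // => "ANN, BOB"
     *
     * Map<String, Integer> counts = Map.of("a", 1, "b", 2);
     * Stream.of(counts).filter(e -> e.getValue() > 1).toList();   // => [b=2]
     * }</pre>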
* * @param the type of the elements * @param iterable the iterable whose elements are to be included in the stream * @return a stream containing the elements of the specified iterable */ public static Stream of(final Iterable iterable) { if (iterable == null) { return empty(); } if (iterable instanceof Collection) { return of((Collection) iterable); } else { return of(iterable.iterator()); } } /** * Returns a stream containing the elements of the specified iterator. * * @param the type of the elements * @param iterator the iterator whose elements are to be included in the stream * @return a stream containing the elements of the specified iterator */ public static Stream of(final Iterator iterator) { if (iterator == null) { return empty(); } return new IteratorStream<>(iterator); } /** * Returns a stream containing the elements of the specified iterator, * starting from the specified start index (inclusive) to the specified end index (exclusive). * * @param the type of the elements * @param iterator the iterator whose elements are to be included in the stream * @param startIndex the starting index (inclusive) of the iterator * @param endIndex the ending index (exclusive) of the iterator * @return a stream containing the specified elements from the iterator */ static Stream of(final Iterator iterator, final int startIndex, final int endIndex) { if (iterator == null && (startIndex == 0 && endIndex == 0)) { return empty(); } if (startIndex < 0 || endIndex < startIndex) { throw new IllegalArgumentException("startIndex(" + startIndex + ") or endIndex(" + endIndex + ") is invalid"); } //noinspection resource return Stream. of(iterator).skip(startIndex).limit(endIndex - startIndex); //NOSONAR } /** * Returns a stream containing the elements of the specified JDK stream. * * @param the type of the elements * @param stream the JDK stream whose elements are to be included in the stream * @return a stream containing the elements of the specified JDK stream * @deprecated Use {@link #from(java.util.stream.Stream)} instead * @see #from(java.util.stream.Stream) */ // Should the name be from? @Deprecated public static Stream of(final java.util.stream.Stream stream) { return from(stream); } /** * Returns a stream containing the elements of the specified enumeration. * * @param the type of the elements * @param enumeration the enumeration whose elements are to be included in the stream * @return a stream containing the elements of the specified enumeration */ public static Stream of(final Enumeration enumeration) { if (enumeration == null) { return empty(); } return new IteratorStream<>(new ObjIteratorEx<>() { @Override public boolean hasNext() { return enumeration.hasMoreElements(); } @Override public T next() { // NOSONAR return enumeration.nextElement(); } }); } /** * Returns a stream containing the elements of the specified boolean array. * * @param a the boolean array whose elements are to be included in the stream * @return a stream containing the elements of the specified boolean array. If the specified array is empty, an empty stream is returned. */ public static Stream of(final boolean[] a) { if (N.isEmpty(a)) { return empty(); } return of(a, 0, a.length); } /** * Returns a stream containing the specified elements from the given boolean array, * starting from the specified start index (inclusive) to the specified end index (exclusive). 
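     * <p>An illustrative note (not part of the original documentation): the primitive-array overloads box each
     * element, so the result is a stream of wrapper objects:</p>
     * <pre>{@code
     * Stream.of(new boolean[] { true, false, true }, 0, 2).toList();   // => [true, false]
     * }</pre>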
* * @param a the boolean array from which elements are to be included in the stream * @param fromIndex the starting index (inclusive) of the array * @param toIndex the ending index (exclusive) of the array * @return a stream containing the specified elements from the array * @throws IndexOutOfBoundsException if the specified start index or end index is out of bounds */ public static Stream of(final boolean[] a, final int fromIndex, final int toIndex) throws IndexOutOfBoundsException { N.checkFromToIndex(fromIndex, toIndex, N.len(a)); if (N.isEmpty(a)) { return empty(); } return of(new ObjIteratorEx<>() { private int cursor = fromIndex; @Override public boolean hasNext() { return cursor < toIndex; } @Override public Boolean next() { if (cursor >= toIndex) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return a[cursor++]; } @Override public long count() { return toIndex - cursor; //NOSONAR } @Override public void advance(final long n) { cursor = n < toIndex - cursor ? cursor + (int) n : toIndex; } @Override public A[] toArray(A[] a2) { a2 = a2.length >= toIndex - cursor ? a2 : (A[]) N.newArray(a2.getClass().getComponentType(), toIndex - cursor); for (int i = 0, len = toIndex - cursor; i < len; i++) { a2[i] = (A) Boolean.valueOf(a[cursor++]); } return a2; } }); } /** * Returns a stream containing the elements of the specified char array. * * @param a the char array whose elements are to be included in the stream * @return a stream containing the elements of the specified char array. If the specified array is empty, an empty stream is returned. */ public static Stream of(final char[] a) { if (N.isEmpty(a)) { return empty(); } return of(a, 0, a.length); } /** * Returns a stream containing the specified elements from the given char array, * starting from the specified start index (inclusive) to the specified end index (exclusive). * * @param a the char array from which elements are to be included in the stream * @param fromIndex the starting index (inclusive) of the array * @param toIndex the ending index (exclusive) of the array * @return a stream containing the specified elements from the array * @throws IndexOutOfBoundsException if the specified start index or end index is out of bounds */ public static Stream of(final char[] a, final int fromIndex, final int toIndex) throws IndexOutOfBoundsException { N.checkFromToIndex(fromIndex, toIndex, N.len(a)); if (N.isEmpty(a)) { return empty(); } return of(new ObjIteratorEx<>() { private int cursor = fromIndex; @Override public boolean hasNext() { return cursor < toIndex; } @Override public Character next() { if (cursor >= toIndex) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return a[cursor++]; } @Override public long count() { return toIndex - cursor; //NOSONAR } @Override public void advance(final long n) { cursor = n < toIndex - cursor ? cursor + (int) n : toIndex; } @Override public A[] toArray(A[] a2) { a2 = a2.length >= toIndex - cursor ? a2 : (A[]) N.newArray(a2.getClass().getComponentType(), toIndex - cursor); for (int i = 0, len = toIndex - cursor; i < len; i++) { a2[i] = (A) Character.valueOf(a[cursor++]); } return a2; } }); } /** * Returns a stream containing the elements of the specified byte array. * * @param a the byte array whose elements are to be included in the stream * @return a stream containing the elements of the specified byte array. If the specified array is empty, an empty stream is returned. 
*/ public static Stream of(final byte[] a) { if (N.isEmpty(a)) { return empty(); } return of(a, 0, a.length); } /** * Returns a stream containing the specified elements from the given byte array, * starting from the specified start index (inclusive) to the specified end index (exclusive). * * @param a the byte array from which elements are to be included in the stream * @param fromIndex the starting index (inclusive) of the array * @param toIndex the ending index (exclusive) of the array * @return a stream containing the specified elements from the array * @throws IndexOutOfBoundsException if the specified start index or end index is out of bounds */ public static Stream of(final byte[] a, final int fromIndex, final int toIndex) throws IndexOutOfBoundsException { N.checkFromToIndex(fromIndex, toIndex, N.len(a)); if (N.isEmpty(a)) { return empty(); } return of(new ObjIteratorEx<>() { private int cursor = fromIndex; @Override public boolean hasNext() { return cursor < toIndex; } @Override public Byte next() { if (cursor >= toIndex) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return a[cursor++]; } @Override public long count() { return toIndex - cursor; //NOSONAR } @Override public void advance(final long n) { cursor = n < toIndex - cursor ? cursor + (int) n : toIndex; } @Override public A[] toArray(A[] a2) { a2 = a2.length >= toIndex - cursor ? a2 : (A[]) N.newArray(a2.getClass().getComponentType(), toIndex - cursor); for (int i = 0, len = toIndex - cursor; i < len; i++) { a2[i] = (A) Byte.valueOf(a[cursor++]); } return a2; } }); } /** * Returns a stream containing the elements of the specified short array. * * @param a the short array whose elements are to be included in the stream * @return a stream containing the elements of the specified short array. If the specified array is empty, an empty stream is returned. */ public static Stream of(final short[] a) { if (N.isEmpty(a)) { return empty(); } return of(a, 0, a.length); } /** * Returns a stream containing the specified elements from the given short array, * starting from the specified start index (inclusive) to the specified end index (exclusive). * * @param a the short array from which elements are to be included in the stream * @param fromIndex the starting index (inclusive) of the array * @param toIndex the ending index (exclusive) of the array * @return a stream containing the specified elements from the array * @throws IndexOutOfBoundsException if the specified start index or end index is out of bounds */ public static Stream of(final short[] a, final int fromIndex, final int toIndex) throws IndexOutOfBoundsException { N.checkFromToIndex(fromIndex, toIndex, N.len(a)); if (N.isEmpty(a)) { return empty(); } return of(new ObjIteratorEx<>() { private int cursor = fromIndex; @Override public boolean hasNext() { return cursor < toIndex; } @Override public Short next() { if (cursor >= toIndex) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return a[cursor++]; } @Override public long count() { return toIndex - cursor; //NOSONAR } @Override public void advance(final long n) { cursor = n < toIndex - cursor ? cursor + (int) n : toIndex; } @Override public A[] toArray(A[] a2) { a2 = a2.length >= toIndex - cursor ? a2 : (A[]) N.newArray(a2.getClass().getComponentType(), toIndex - cursor); for (int i = 0, len = toIndex - cursor; i < len; i++) { a2[i] = (A) Short.valueOf(a[cursor++]); } return a2; } }); } /** * Returns a stream containing the elements of the specified int array. 
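     * <p>A minimal usage sketch (each primitive value is boxed to an {@code Integer}):
     * <pre>
     * {@code
     * final int[] a = {1, 2, 3};
     * Stream.of(a).forEach(Fn.println()); // prints 1, 2, 3
     * }
     * </pre>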
* * @param a the int array whose elements are to be included in the stream * @return a stream containing the elements of the specified int array. If the specified array is empty, an empty stream is returned. */ public static Stream of(final int[] a) { if (N.isEmpty(a)) { return empty(); } return of(a, 0, a.length); } /** * Returns a stream containing the specified elements from the given int array, * starting from the specified start index (inclusive) to the specified end index (exclusive). * * @param a the int array from which elements are to be included in the stream * @param fromIndex the starting index (inclusive) of the array * @param toIndex the ending index (exclusive) of the array * @return a stream containing the specified elements from the array * @throws IndexOutOfBoundsException if the specified start index or end index is out of bounds */ public static Stream of(final int[] a, final int fromIndex, final int toIndex) throws IndexOutOfBoundsException { N.checkFromToIndex(fromIndex, toIndex, N.len(a)); if (N.isEmpty(a)) { return empty(); } return of(new ObjIteratorEx<>() { private int cursor = fromIndex; @Override public boolean hasNext() { return cursor < toIndex; } @Override public Integer next() { if (cursor >= toIndex) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return a[cursor++]; } @Override public long count() { return toIndex - cursor; //NOSONAR } @Override public void advance(final long n) { cursor = n < toIndex - cursor ? cursor + (int) n : toIndex; } @Override public A[] toArray(A[] a2) { a2 = a2.length >= toIndex - cursor ? a2 : (A[]) N.newArray(a2.getClass().getComponentType(), toIndex - cursor); for (int i = 0, len = toIndex - cursor; i < len; i++) { a2[i] = (A) Integer.valueOf(a[cursor++]); } return a2; } }); } /** * Returns a stream containing the elements of the specified long array. * * @param a the long array whose elements are to be included in the stream * @return a stream containing the elements of the specified long array. If the specified array is empty, an empty stream is returned. */ public static Stream of(final long[] a) { if (N.isEmpty(a)) { return empty(); } return of(a, 0, a.length); } /** * Returns a stream containing the specified elements from the given long array, * starting from the specified start index (inclusive) to the specified end index (exclusive). * * @param a the long array from which elements are to be included in the stream * @param fromIndex the starting index (inclusive) of the array * @param toIndex the ending index (exclusive) of the array * @return a stream containing the specified elements from the array * @throws IndexOutOfBoundsException if the specified start index or end index is out of bounds */ public static Stream of(final long[] a, final int fromIndex, final int toIndex) throws IndexOutOfBoundsException { N.checkFromToIndex(fromIndex, toIndex, N.len(a)); if (N.isEmpty(a)) { return empty(); } return of(new ObjIteratorEx<>() { private int cursor = fromIndex; @Override public boolean hasNext() { return cursor < toIndex; } @Override public Long next() { if (cursor >= toIndex) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return a[cursor++]; } @Override public long count() { return toIndex - cursor; //NOSONAR } @Override public void advance(final long n) { cursor = n < toIndex - cursor ? cursor + (int) n : toIndex; } @Override public A[] toArray(A[] a2) { a2 = a2.length >= toIndex - cursor ? 
a2 : (A[]) N.newArray(a2.getClass().getComponentType(), toIndex - cursor); for (int i = 0, len = toIndex - cursor; i < len; i++) { a2[i] = (A) Long.valueOf(a[cursor++]); } return a2; } }); } /** * Returns a stream containing the elements of the specified float array. * * @param a the float array whose elements are to be included in the stream * @return a stream containing the elements of the specified float array. If the specified array is empty, an empty stream is returned. */ public static Stream of(final float[] a) { if (N.isEmpty(a)) { return empty(); } return of(a, 0, a.length); } /** * Returns a stream containing the specified elements from the given float array, * starting from the specified start index (inclusive) to the specified end index (exclusive). * * @param a the float array from which elements are to be included in the stream * @param fromIndex the starting index (inclusive) of the array * @param toIndex the ending index (exclusive) of the array * @return a stream containing the specified elements from the array * @throws IndexOutOfBoundsException if the specified start index or end index is out of bounds */ public static Stream of(final float[] a, final int fromIndex, final int toIndex) throws IndexOutOfBoundsException { N.checkFromToIndex(fromIndex, toIndex, N.len(a)); if (N.isEmpty(a)) { return empty(); } return of(new ObjIteratorEx<>() { private int cursor = fromIndex; @Override public boolean hasNext() { return cursor < toIndex; } @Override public Float next() { if (cursor >= toIndex) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return a[cursor++]; } @Override public long count() { return toIndex - cursor; //NOSONAR } @Override public void advance(final long n) { cursor = n < toIndex - cursor ? cursor + (int) n : toIndex; } @Override public A[] toArray(A[] a2) { a2 = a2.length >= toIndex - cursor ? a2 : (A[]) N.newArray(a2.getClass().getComponentType(), toIndex - cursor); for (int i = 0, len = toIndex - cursor; i < len; i++) { a2[i] = (A) Float.valueOf(a[cursor++]); } return a2; } }); } /** * Returns a stream containing the elements of the specified double array. * * @param a the double array whose elements are to be included in the stream * @return a stream containing the elements of the specified double array. If the specified array is empty, an empty stream is returned. */ public static Stream of(final double[] a) { if (N.isEmpty(a)) { return empty(); } return of(a, 0, a.length); } /** * Returns a stream containing the specified elements from the given double array, * starting from the specified start index (inclusive) to the specified end index (exclusive). 
* * @param a the double array from which elements are to be included in the stream * @param fromIndex the starting index (inclusive) of the array * @param toIndex the ending index (exclusive) of the array * @return a stream containing the specified elements from the array * @throws IndexOutOfBoundsException if the specified start index or end index is out of bounds */ public static Stream of(final double[] a, final int fromIndex, final int toIndex) throws IndexOutOfBoundsException { N.checkFromToIndex(fromIndex, toIndex, N.len(a)); if (N.isEmpty(a)) { return empty(); } return of(new ObjIteratorEx<>() { private int cursor = fromIndex; @Override public boolean hasNext() { return cursor < toIndex; } @Override public Double next() { if (cursor >= toIndex) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return a[cursor++]; } @Override public long count() { return toIndex - cursor; //NOSONAR } @Override public void advance(final long n) { cursor = n < toIndex - cursor ? cursor + (int) n : toIndex; } @Override public A[] toArray(A[] a2) { a2 = a2.length >= toIndex - cursor ? a2 : (A[]) N.newArray(a2.getClass().getComponentType(), toIndex - cursor); for (int i = 0, len = toIndex - cursor; i < len; i++) { a2[i] = (A) Double.valueOf(a[cursor++]); } return a2; } }); } /** * Returns a stream containing the element of the specified Optional if it is present. * * @param the type of the element * @param op the Optional whose element is to be included in the stream * @return a stream containing the element of the specified Optional if it is present, otherwise an empty stream */ public static Stream of(final Optional op) { return op == null || op.isEmpty() ? Stream.empty() : Stream.of(op.get()); //NOSONAR } /** * Returns a stream containing the element of the specified Optional if it is present. * * @param the type of the element * @param op the Optional whose element is to be included in the stream * @return a stream containing the element of the specified Optional if it is present, otherwise an empty */ public static Stream of(final java.util.Optional op) { return op == null || op.isEmpty() ? Stream.empty() : Stream.of(op.get()); //NOSONAR } /** * Returns a stream containing the keys of the specified map. * * @param the type of the keys in the map * @param map the map whose keys are to be included in the stream * @return a stream containing the keys of the specified map */ public static Stream ofKeys(final Map map) { if (N.isEmpty(map)) { return Stream.empty(); } return of(map.keySet()); } /** * Returns a stream containing the keys of the specified map whose values match the given predicate. * * @param the type of the keys in the map * @param the type of the values in the map * @param map the map whose keys are to be included in the stream * @param valueFilter the predicate to filter the values * @return a stream containing the keys of the specified map whose values match the given predicate */ public static Stream ofKeys(final Map map, final Predicate valueFilter) { if (N.isEmpty(map)) { return StreamEx.empty(); } //noinspection resource return EntryStream. of(map).filterByValue(valueFilter).keys(); } /** * Returns a stream containing the keys of the specified map whose entries match the given bi-predicate. 
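     * <p>A usage sketch, assuming {@code N.asMap} is available to build the sample map:
     * <pre>
     * {@code
     * final Map<String, Integer> map = N.asMap("a", 1, "b", 2, "c", 3);
     * Stream.ofKeys(map, (k, v) -> v > 1).forEach(Fn.println()); // prints the keys whose value is greater than 1
     * }
     * </pre>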
* * @param the type of the keys in the map * @param the type of the values in the map * @param map the map whose keys are to be included in the stream * @param filter the bi-predicate to filter the entries * @return a stream containing the keys of the specified map whose entries match the given bi-predicate */ public static Stream ofKeys(final Map map, final BiPredicate filter) { if (N.isEmpty(map)) { return StreamEx.empty(); } //noinspection resource return EntryStream. of(map).filter(filter).keys(); } /** * Returns a stream containing the values of the specified map. * * @param the type of the values in the map * @param map the map whose values are to be included in the stream * @return a stream containing the values of the specified map */ public static Stream ofValues(final Map map) { if (N.isEmpty(map)) { return Stream.empty(); } return of(map.values()); } /** * Returns a stream containing the values of the specified map whose keys match the given predicate. * * @param the type of the keys in the map * @param the type of the values in the map * @param map the map whose values are to be included in the stream * @param keyFilter the predicate to filter the keys * @return a stream containing the values of the specified map whose keys match the given predicate */ public static Stream ofValues(final Map map, final Predicate keyFilter) { if (N.isEmpty(map)) { return Stream.empty(); } //noinspection resource return EntryStream. of(map).filterByKey(keyFilter).values(); } /** * Returns a stream containing the values of the specified map whose entries match the given bi-predicate. * * @param the type of the keys in the map * @param the type of the values in the map * @param map the map whose values are to be included in the stream * @param filter the bi-predicate to filter the entries * @return a stream containing the values of the specified map whose entries match the given bi-predicate */ public static Stream ofValues(final Map map, final BiPredicate filter) { if (N.isEmpty(map)) { return Stream.empty(); } //noinspection resource return EntryStream. of(map).filter(filter).values(); } /** * Returns a stream containing the elements of the specified array in reverse order. * * @param the type of the elements in the array * @param array the array whose elements are to be included in the stream in reverse order * @return a stream containing the elements of the specified array in reverse order */ public static Stream ofReversed(final T[] array) { final int len = N.len(array); //noinspection resource return IntStream.range(0, len).mapToObj(idx -> array[len - idx - 1]); } /** * Returns a stream containing the elements of the specified list in reverse order. * * @param the type of the elements in the list * @param list the list whose elements are to be included in the stream in reverse order * @return a stream containing the elements of the specified list in reverse order */ public static Stream ofReversed(final List list) { final int size = N.size(list); //noinspection resource return IntStream.range(0, size).mapToObj(idx -> list.get(size - idx - 1)); } /** * Returns a stream of integers within the specified range. 
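     * <p>For example:
     * <pre>
     * {@code
     * Stream.range(0, 5).forEach(Fn.println()); // prints 0, 1, 2, 3, 4
     * }
     * </pre>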
* * @param startInclusive the starting value (inclusive) * @param endExclusive the ending value (exclusive) * @return a stream of integers from startInclusive (inclusive) to endExclusive (exclusive) */ public static Stream range(final int startInclusive, final int endExclusive) { //noinspection resource return IntStream.range(startInclusive, endExclusive).boxed(); } /** * Returns a stream of integers within the specified range, with a specified step. * * @param startInclusive the starting value (inclusive) * @param endExclusive the ending value (exclusive) * @param by The step to increment (if positive) or decrement (if negative) for each subsequent integer * @return a stream of integers from startInclusive (inclusive) to endExclusive (exclusive) with the specified step */ public static Stream range(final int startInclusive, final int endExclusive, final int by) { //noinspection resource return IntStream.range(startInclusive, endExclusive, by).boxed(); } /** * Returns a stream of longs within the specified range. * * @param startInclusive the starting value (inclusive) * @param endExclusive the ending value (exclusive) * @return a stream of longs from startInclusive (inclusive) to endExclusive (exclusive) */ public static Stream range(final long startInclusive, final long endExclusive) { //noinspection resource return LongStream.range(startInclusive, endExclusive).boxed(); } /** * Returns a stream of longs within the specified range, with a specified step. * * @param startInclusive the starting value (inclusive) * @param endExclusive the ending value (exclusive) * @param by The step to increment (if positive) or decrement (if negative) for each subsequent long * @return a stream of longs from startInclusive (inclusive) to endExclusive (exclusive) with the specified step */ public static Stream range(final long startInclusive, final long endExclusive, final long by) { //noinspection resource return LongStream.range(startInclusive, endExclusive, by).boxed(); } /** * Returns a stream of integers within the specified range. * * @param startInclusive the starting value (inclusive) * @param endInclusive the ending value (inclusive) * @return a stream of integers from startInclusive (inclusive) to endInclusive (inclusive) */ public static Stream rangeClosed(final int startInclusive, final int endInclusive) { //noinspection resource return IntStream.rangeClosed(startInclusive, endInclusive).boxed(); } /** * Returns a stream of integers within the specified range, with a specified step. * * @param startInclusive the starting value (inclusive) * @param endInclusive the ending value (inclusive) * @param by The step to increment (if positive) or decrement (if negative) for each subsequent long * @return a stream of integers from startInclusive (inclusive) to endInclusive (inclusive) with the specified step */ public static Stream rangeClosed(final int startInclusive, final int endInclusive, final int by) { //noinspection resource return IntStream.rangeClosed(startInclusive, endInclusive, by).boxed(); } /** * Returns a stream of longs within the specified range. 
* * @param startInclusive the starting value (inclusive) * @param endInclusive the ending value (inclusive) * @return a stream of longs from startInclusive (inclusive) to endInclusive (inclusive) */ public static Stream rangeClosed(final long startInclusive, final long endInclusive) { //noinspection resource return LongStream.rangeClosed(startInclusive, endInclusive).boxed(); } /** * Returns a stream of longs within the specified range, with a specified step. * * @param startInclusive the starting value (inclusive) * @param endInclusive the ending value (inclusive) * @param by The step to increment (if positive) or decrement (if negative) for each subsequent long * @return a stream of longs from startInclusive (inclusive) to endInclusive (inclusive) with the specified */ public static Stream rangeClosed(final long startInclusive, final long endInclusive, final long by) { //noinspection resource return LongStream.rangeClosed(startInclusive, endInclusive, by).boxed(); } /** * Splits the given character sequence by the specified delimiter and returns a stream of the resulting substrings. * * @param str the character sequence to be split * @param delimiter the character used as the delimiter * @return a stream of substrings resulting from splitting the input character sequence by the delimiter */ public static Stream split(final CharSequence str, final char delimiter) { return Splitter.with(delimiter).splitToStream(str); } /** * Splits the given character sequence by the specified delimiter and returns a stream of the resulting substrings. * * @param str the character sequence to be split * @param delimiter the character sequence used as the delimiter * @return a stream of substrings resulting from splitting the input character sequence by the delimiter */ public static Stream split(final CharSequence str, final CharSequence delimiter) { return Splitter.with(delimiter).splitToStream(str); } /** * Splits the given character sequence by the specified pattern and returns a stream of the resulting substrings. * * @param str the character sequence to be split * @param pattern the pattern used as the delimiter * @return a stream of substrings resulting from splitting the input character sequence by the pattern */ public static Stream split(final CharSequence str, final Pattern pattern) { return Splitter.with(pattern).splitToStream(str); } /** * Splits the given string into lines and returns a stream of the resulting lines. * * @param str the string to be split into lines * @return a stream of lines resulting from splitting the input string */ @Beta public static Stream splitToLines(final String str) { return lineSplitter.splitToStream(str); } /** * Splits the given string into lines and returns a stream of the resulting lines. * Optionally trims each line and omits empty lines based on the provided flags. * * @param str the string to be split into lines * @param trim whether to trim each line * @param omitEmptyLines whether to omit empty lines * @return a stream of lines resulting from splitting the input string */ @Beta public static Stream splitToLines(final String str, final boolean trim, final boolean omitEmptyLines) { if (trim) { if (omitEmptyLines) { return trimAndOmitEmptyLinesLineSplitter.splitToStream(str); } else { return trimLineSplitter.splitToStream(str); } } else if (omitEmptyLines) { return omitEmptyLinesLineSplitter.splitToStream(str); } else { return lineSplitter.splitToStream(str); } } /** * Splits the total size into chunks based on the specified maximum chunk count. *
     * Larger chunks are created first.
     *
* The length of returned Stream may be less than the specified {@code maxChunkCount} if the input {@code totalSize} is less than {@code maxChunkCount}. * * @param the type of the elements in the resulting stream * @param totalSize the total size to be split. It could be the size of an array, list, etc. * @param maxChunkCount the maximum number of chunks to split into * @param mapper a function to map the chunk from and to index to an element in the resulting stream * @return a Stream of the mapped chunk values * @throws IllegalArgumentException if {@code totalSize} is negative or {@code maxChunkCount} is not positive. * @see #splitByChunkCount(int, int, boolean, IntBiFunction) */ public static Stream splitByChunkCount(final int totalSize, final int maxChunkCount, final IntBiFunction mapper) { return splitByChunkCount(totalSize, maxChunkCount, false, mapper); } /** * Splits the total size into chunks based on the specified maximum chunk count. *
     * Depending on the {@code sizeSmallerFirst} flag, either the smaller or the larger chunks are created first.
     *
     * The length of the returned Stream may be less than the specified {@code maxChunkCount} if the input {@code totalSize} is less than {@code maxChunkCount}.
     *
     *
     * <pre>
     * {@code
     * final int[] a = Array.rangeClosed(1, 7);
     * splitByChunkCount(7, 5, true, (fromIndex, toIndex) -> copyOfRange(a, fromIndex, toIndex)); // [[1], [2], [3], [4, 5], [6, 7]]
     * splitByChunkCount(7, 5, false, (fromIndex, toIndex) -> copyOfRange(a, fromIndex, toIndex)); // [[1, 2], [3, 4], [5], [6], [7]]
     * }
     * </pre>
     *
* * @param the type of the elements in the resulting stream * @param totalSize the total size to be split. It could be the size of an array, list, etc. * @param maxChunkCount the maximum number of chunks to split into * @param sizeSmallerFirst if {@code true}, smaller chunks will be created first; otherwise, larger chunks will be created first * @param mapper a function to map the chunk from and to index to an element in the resulting stream * @return a Stream of the mapped chunk values * @throws IllegalArgumentException if {@code totalSize} is negative or {@code maxChunkCount} is not positive. * @see IntStream#splitByChunkCount(int, int, boolean, IntBinaryOperator) */ public static Stream splitByChunkCount(final int totalSize, final int maxChunkCount, final boolean sizeSmallerFirst, final IntBiFunction mapper) { N.checkArgNotNegative(totalSize, cs.totalSize); N.checkArgPositive(maxChunkCount, cs.maxChunkCount); if (totalSize == 0) { return Stream.empty(); } final int count = Math.min(totalSize, maxChunkCount); final int biggerSize = totalSize % maxChunkCount == 0 ? totalSize / maxChunkCount : totalSize / maxChunkCount + 1; final int biggerCount = totalSize % maxChunkCount; final int smallerSize = Math.max(totalSize / maxChunkCount, 1); final int smallerCount = count - biggerCount; ObjIteratorEx iter = null; if (sizeSmallerFirst) { iter = new ObjIteratorEx<>() { private int cnt = 0; private int cursor = 0; @Override public boolean hasNext() { return cursor < totalSize; } @Override public T next() { if (cursor >= totalSize) { throw new NoSuchElementException(InternalUtil.ERROR_MSG_FOR_NO_SUCH_EX); } return mapper.apply(cursor, cursor = (cnt++ < smallerCount ? cursor + smallerSize : cursor + biggerSize)); } @Override public void advance(long n) throws IllegalArgumentException { if (n > 0) { while (n-- > 0 && cursor < totalSize) { cursor = cnt++ < smallerCount ? cursor + smallerSize : cursor + biggerSize; } } } @Override public long count() { return count; } }; } else { iter = new ObjIteratorEx<>() { private int cnt = 0; private int cursor = 0; @Override public boolean hasNext() { return cursor < totalSize; } @Override public T next() { if (cursor >= totalSize) { throw new NoSuchElementException(InternalUtil.ERROR_MSG_FOR_NO_SUCH_EX); } return mapper.apply(cursor, cursor = (cnt++ < biggerCount ? cursor + biggerSize : cursor + smallerSize)); } @Override public void advance(long n) throws IllegalArgumentException { if (n > 0) { while (n-- > 0 && cursor < totalSize) { cursor = cnt++ < biggerCount ? cursor + biggerSize : cursor + smallerSize; } } } @Override public long count() { return count; } }; } return Stream.of(iter); } /** * Flattens a collection of collections into a single stream of elements. * * @param the type of elements in the collections * @param c the collection of collections to be flattened * @return a stream of elements from the flattened collections */ public static Stream flatten(final Collection> c) { //noinspection resource return of(c).flatmap(Fn.> identity()); } /** * Flattens a two-dimensional array into a single stream of elements. * * @param the type of elements in the array * @param a the two-dimensional array to be flattened * @return a stream of elements from the flattened array */ public static Stream flatten(final T[][] a) { //noinspection resource return of(a).flattMap(Fn.identity()); } /** * Flattens a two-dimensional array into a single stream of elements. 
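     * <p>A sketch of the difference between horizontal and vertical flattening:
     * <pre>
     * {@code
     * final Integer[][] a = { {1, 2, 3}, {4, 5, 6} };
     * Stream.flatten(a, false).forEach(Fn.println()); // row by row: 1, 2, 3, 4, 5, 6
     * Stream.flatten(a, true).forEach(Fn.println());  // column by column: 1, 4, 2, 5, 3, 6
     * }
     * </pre>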
* * @param the type of elements in the array * @param a the two-dimensional array to be flattened * @param vertically if {@code true}, the array is flattened vertically; otherwise, it is flattened horizontally * @return a stream of elements from the flattened array */ public static Stream flatten(final T[][] a, final boolean vertically) { if (N.isEmpty(a)) { return empty(); } else if (a.length == 1) { return of(a[0]); } else if (!vertically) { //noinspection resource return of(a).flattMap(Fn.identity()); } long n = 0; for (final T[] e : a) { n += N.len(e); } if (n == 0) { return empty(); } final int rows = N.len(a); final long count = n; final Iterator iter = new ObjIteratorEx<>() { private int rowNum = 0; private int colNum = 0; private long cnt = 0; @Override public boolean hasNext() { return cnt < count; } @Override public T next() { if (cnt++ >= count) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } if (rowNum == rows) { rowNum = 0; colNum++; } while (a[rowNum] == null || colNum >= a[rowNum].length) { if (rowNum < rows - 1) { rowNum++; } else { rowNum = 0; colNum++; } } return a[rowNum++][colNum]; } }; return of(iter); } /** * Flattens a two-dimensional array into a single stream of elements, aligning the elements if necessary. * * @param the type of elements in the array * @param a the two-dimensional array to be flattened * @param valueForAlignment the element to append so there are the same number of elements in all rows/columns * @param vertically if {@code true}, the array is flattened vertically; otherwise, it is flattened horizontally * @return a stream of elements from the flattened array */ public static Stream flatten(final T[][] a, final T valueForAlignment, final boolean vertically) { if (N.isEmpty(a)) { return empty(); } else if (a.length == 1) { return of(a[0]); } long n = 0; int maxLen = 0; for (final T[] e : a) { n += N.len(e); maxLen = N.max(maxLen, N.len(e)); } if (n == 0) { return empty(); } final int rows = N.len(a); final int cols = maxLen; final long count = (long) rows * cols; Iterator iter = null; if (vertically) { iter = new ObjIteratorEx<>() { private int rowNum = 0; private int colNum = 0; private long cnt = 0; @Override public boolean hasNext() { return cnt < count; } @Override public T next() { if (cnt++ >= count) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } if (rowNum == rows) { rowNum = 0; colNum++; } if (a[rowNum] == null || colNum >= a[rowNum].length) { rowNum++; return valueForAlignment; } else { return a[rowNum++][colNum]; } } }; } else { iter = new ObjIteratorEx<>() { private int rowNum = 0; private int colNum = 0; private long cnt = 0; @Override public boolean hasNext() { return cnt < count; } @Override public T next() { if (cnt++ >= count) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } if (colNum >= cols) { colNum = 0; rowNum++; } if (a[rowNum] == null || colNum >= a[rowNum].length) { colNum++; return valueForAlignment; } else { return a[rowNum][colNum++]; } } }; } return of(iter); } /** * Flattens a three-dimensional array into a single stream of elements. * * @param the type of elements in the array * @param a the three-dimensional array to be flattened * @return a stream of elements from the flattened array */ public static Stream flatten(final T[][][] a) { //noinspection resource return of(a).flattMap(e -> e).flattMap(Fn.identity()); } /** * Creates a stream that repeats the given element a specified number of times. 
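     * <p>For example:
     * <pre>
     * {@code
     * Stream.repeat("a", 3).forEach(Fn.println()); // prints "a" three times
     * }
     * </pre>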
* * @param the type of the element * @param element the element to be repeated * @param n the number of times to repeat the element * @return a stream of the repeated element * @throws IllegalArgumentException if the number of repetitions is negative */ public static Stream repeat(final T element, final long n) throws IllegalArgumentException { N.checkArgNotNegative(n, cs.n); if (n == 0) { return empty(); } return new IteratorStream<>(new ObjIteratorEx<>() { private long cnt = n; @Override public boolean hasNext() { return cnt > 0; } @Override public T next() { if (cnt-- <= 0) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return element; } @Override public void advance(final long n) { cnt = n >= cnt ? 0 : cnt - (int) n; } @Override public long count() { return cnt; } @Override public
A[] toArray(A[] a) { a = a.length >= cnt ? a : N.copyOf(a, (int) cnt); for (int i = 0; i < cnt; i++) { a[i] = (A) element; } cnt = 0; return a; } }); } /** * Creates a stream that iterates using the given hasNext and next suppliers. * * @param the type of elements in the stream * @param hasNext a BooleanSupplier that returns {@code true} if the iteration should continue * @param next a Supplier that provides the next element in the iteration * @return a stream of elements generated by the iteration * @throws IllegalArgumentException if hasNext or next is null */ public static Stream iterate(final BooleanSupplier hasNext, final Supplier next) throws IllegalArgumentException { N.checkArgNotNull(hasNext); N.checkArgNotNull(next); return of(new ObjIteratorEx<>() { private boolean hasNextVal = false; @Override public boolean hasNext() { if (!hasNextVal) { hasNextVal = hasNext.getAsBoolean(); } return hasNextVal; } @Override public T next() { if (!hasNextVal && !hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } hasNextVal = false; return next.get(); } }); } /** * Returns a sequential ordered {@code Stream} produced by iterative * application of a function {@code f} to an initial element {@code init}, * producing a {@code Stream} consisting of {@code init}, {@code f(init)}, * {@code f(f(init))}, etc. * *

The first element (position {@code 0}) in the {@code Stream} will be * the provided {@code init}. For {@code n > 0}, the element at position * {@code n}, will be the result of applying the function {@code f} to the * element at position {@code n - 1}. * * @param the type of elements in the stream * @param init the initial element * @param hasNext a predicate that returns {@code true} if the iteration should continue * @param f a function to apply to the previous element to generate the next element * @return a stream of elements generated by the iteration * @throws IllegalArgumentException if the initial element, {@code hasNext} or {@code f} is null */ public static Stream iterate(final T init, final BooleanSupplier hasNext, final UnaryOperator f) throws IllegalArgumentException { N.checkArgNotNull(init); N.checkArgNotNull(hasNext); N.checkArgNotNull(f); return of(new ObjIteratorEx<>() { private T cur = (T) NONE; private boolean hasNextVal = false; @Override public boolean hasNext() { if (!hasNextVal) { hasNextVal = hasNext.getAsBoolean(); } return hasNextVal; } @Override public T next() { if (!hasNextVal && !hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } hasNextVal = false; return cur = (cur == NONE ? init : f.apply(cur)); } }); } /** * Creates a stream that iterates from an initial value, applying a function to generate subsequent values, * and continues as long as a predicate is satisfied. * * @param the type of elements in the stream * @param init the initial value * @param hasNext test if has next by hasNext.test(init) for first time and hasNext.test(f.apply(previous)) for remaining. * @param f a function to apply to the previous element to generate the next element * @return a stream of elements generated by the iteration * @throws IllegalArgumentException if the initial element, hasNext or f is null */ public static Stream iterate(final T init, final Predicate hasNext, final UnaryOperator f) throws IllegalArgumentException { N.checkArgNotNull(init); N.checkArgNotNull(hasNext); N.checkArgNotNull(f); return of(new ObjIteratorEx<>() { private T cur = (T) NONE; private boolean hasMore = true; private boolean hasNextVal = false; @Override public boolean hasNext() { if (!hasNextVal && hasMore) { hasNextVal = hasNext.test((cur = (cur == NONE ? init : f.apply(cur)))); if (!hasNextVal) { hasMore = false; } } return hasNextVal; } @Override public T next() { if (!hasNextVal && !hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } hasNextVal = false; return cur; } }); } /** * Creates a stream that iterates over elements starting from the given initial value. * Each subsequent element is generated by applying the provided unary operator to the previous element. * * @param the type of the elements in the stream * @param init the initial element * @param f the function to apply to the previous element to produce the next element * @return a stream of elements generated by iterating with the given function * @throws IllegalArgumentException if the initial element or f is null */ public static Stream iterate(final T init, final UnaryOperator f) throws IllegalArgumentException { N.checkArgNotNull(init); N.checkArgNotNull(f); return of(new ObjIteratorEx<>() { private T cur = (T) NONE; @Override public boolean hasNext() { return true; } @Override public T next() { // NOSONAR return cur = (cur == NONE ? init : f.apply(cur)); } }); } /** * Generates a Stream using the provided Supplier. * The supplier is used to generate each element of the stream. 
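     * <p>The returned stream is unbounded, so it is typically bounded by a downstream operation:
     * <pre>
     * {@code
     * Stream.generate(() -> "a").limit(3).forEach(Fn.println()); // prints "a" three times
     * }
     * </pre>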
* * @param the type of the elements in the stream * @param supplier the Supplier that provides the elements of the stream * @return a Stream generated by the given supplier * @throws IllegalArgumentException if the supplier is null */ public static Stream generate(final Supplier supplier) throws IllegalArgumentException { N.checkArgNotNull(supplier, cs.supplier); return of(new ObjIteratorEx<>() { @Override public boolean hasNext() { return true; } @Override public T next() { // NOSONAR return supplier.get(); } }); } /** * Creates a stream of lines from the specified file. * * @param file the file to read lines from * @return a stream of lines from the file */ public static Stream ofLines(final File file) { return ofLines(file, Charsets.DEFAULT); } /** * Creates a stream of lines from the specified file. * * @param file the file to read lines from * @param charset the character set to use for reading the file * @return a stream of lines from the file */ public static Stream ofLines(final File file, final Charset charset) { N.checkArgNotNull(file, cs.file); final ObjIteratorEx iter = createLazyLineIterator(file, null, charset, null, true); return of(iter).onClose(iter::close); //NOSONAR } /** * Creates a stream of lines from the specified path. * * @param path the path to read lines from * @return a stream of lines from the path */ public static Stream ofLines(final Path path) { return ofLines(path, Charsets.DEFAULT); } /** * Creates a stream of lines from the specified path. * * @param path the path to read lines from * @param charset the character set to use for reading the file * @return a stream of lines from the path */ public static Stream ofLines(final Path path, final Charset charset) { N.checkArgNotNull(path, cs.path); final ObjIteratorEx iter = createLazyLineIterator(null, path, charset, null, true); return of(iter).onClose(iter::close); //NOSONAR } /** * Creates a stream of lines from the given Reader. *
* It's user's responsibility to close the input {@code reader} after the stream is completed. * * @param reader the Reader to read lines from * @return a Stream of lines read from the Reader */ public static Stream ofLines(final Reader reader) { return ofLines(reader, false); } /** * Creates a stream of lines from the given Reader. * * @param reader the Reader to read lines from * @param closeReaderWhenStreamIsClosed if {@code true}, the input {@code Reader} will be closed when the stream is closed * @return a Stream of lines read from the Reader * @throws IllegalArgumentException if the reader is null */ public static Stream ofLines(final Reader reader, final boolean closeReaderWhenStreamIsClosed) throws IllegalArgumentException { N.checkArgNotNull(reader, cs.reader); final ObjIteratorEx iter = createLazyLineIterator(null, null, Charsets.DEFAULT, reader, closeReaderWhenStreamIsClosed); if (closeReaderWhenStreamIsClosed) { return of(iter).onClose(iter::close); //NOSONAR } else { return of(iter); //NOSONAR } } private static ObjIteratorEx createLazyLineIterator(final File file, final Path path, final Charset charset, final Reader reader, final boolean closeReader) { return ObjIteratorEx.defer(new Supplier>() { private ObjIteratorEx lazyIter = null; @Override public synchronized ObjIteratorEx get() { if (lazyIter == null) { lazyIter = new ObjIteratorEx<>() { private final BufferedReader bufferedReader; { //NOSONAR if (reader != null) { bufferedReader = reader instanceof BufferedReader ? ((BufferedReader) reader) : new BufferedReader(reader); } else if (file != null) { bufferedReader = IOUtil.newBufferedReader(file, charset == null ? Charsets.DEFAULT : charset); } else { bufferedReader = IOUtil.newBufferedReader(path, charset == null ? Charsets.DEFAULT : charset); } } private final LineIterator lineIterator = new LineIterator(bufferedReader); @Override public boolean hasNext() { return lineIterator.hasNext(); } @Override public String next() { return lineIterator.next(); } @Override public void close() { if (closeReader) { IOUtil.closeQuietly(bufferedReader); } } }; } return lazyIter; } }); } /** * Lists the files in the specified parent directory. * * @param parentPath the parent directory to list files from * @return a stream of files in the parent directory */ public static Stream listFiles(final File parentPath) { N.checkArgNotNull(parentPath, cs.parentPath); if (!parentPath.exists()) { return empty(); } return of(parentPath.listFiles()); } /** * Lists the files in the specified parent directory. 
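     * <p>A usage sketch ({@code "/tmp"} is just a sample directory):
     * <pre>
     * {@code
     * Stream.listFiles(new File("/tmp"), true).filter(File::isFile).forEach(Fn.println());
     * }
     * </pre>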
* * @param parentPath the parent directory to list files from * @param recursively if {@code true}, lists files recursively in sub-directories * @return a stream of files in the parent directory */ public static Stream listFiles(final File parentPath, final boolean recursively) { N.checkArgNotNull(parentPath, cs.parentPath); if (!parentPath.exists()) { return empty(); } else if (!recursively) { return of(parentPath.listFiles()); } final ObjIterator iter = new ObjIterator<>() { private final Queue paths = N.asLinkedList(parentPath); private File[] subFiles = null; private int cursor = 0; @Override public boolean hasNext() { if ((subFiles == null || cursor >= subFiles.length) && paths.size() > 0) { cursor = 0; subFiles = null; while (paths.size() > 0) { subFiles = paths.poll().listFiles(); if (N.notEmpty(subFiles)) { break; } } } return subFiles != null && cursor < subFiles.length; } @Override public File next() { if (!hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } if (subFiles[cursor].isDirectory()) { paths.offer(subFiles[cursor]); } return subFiles[cursor++]; } }; return of(iter); } /** * Creates a stream that generates elements at fixed intervals. * * @param the type of elements generated by the stream * @param intervalInMillis the interval in milliseconds between each element generation * @param s the supplier that generates the elements * @return a stream that generates elements at the specified interval */ public static Stream interval(final long intervalInMillis, final Supplier s) { return interval(0, intervalInMillis, s); } /** * Creates a stream that generates elements at fixed intervals after an initial delay. * * @param the type of elements generated by the stream * @param delayInMillis the initial delay in milliseconds before the first element is generated * @param intervalInMillis the interval in milliseconds between each element generation * @param s the supplier that generates the elements * @return a stream that generates elements at the specified interval after the initial delay */ public static Stream interval(final long delayInMillis, final long intervalInMillis, final Supplier s) { return interval(delayInMillis, intervalInMillis, TimeUnit.MILLISECONDS, s); } /** * Creates a stream that generates elements at fixed intervals after an initial delay. * * @param the type of elements generated by the stream * @param delay the initial delay before the first element is generated * @param interval the interval between each element generation * @param unit the time unit of the delay and interval * @param s the supplier that generates the elements * @return a stream that generates elements at the specified interval after the initial delay * @throws IllegalArgumentException if the supplier is null * @see LongStream#interval(long, long, TimeUnit) */ public static Stream interval(final long delay, final long interval, final TimeUnit unit, final Supplier s) throws IllegalArgumentException { N.checkArgNotNull(s, cs.supplier); //noinspection resource return LongStream.interval(delay, interval, unit).mapToObj(value -> s.get()); } /** * Creates a stream that generates elements at fixed intervals. 
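     * <p>A sketch (the stream is unbounded, so it is limited here; the function receives the current time in milliseconds):
     * <pre>
     * {@code
     * Stream.interval(100, millis -> "tick at " + millis).limit(3).forEach(Fn.println());
     * }
     * </pre>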
* * @param the type of elements generated by the stream * @param intervalInMillis the interval in milliseconds between each element generation * @param s the function that generates the elements based on the current time in milliseconds * @return a stream that generates elements at the specified interval * @throws IllegalArgumentException if the function is null */ public static Stream interval(final long intervalInMillis, final LongFunction s) { return interval(0, intervalInMillis, s); } /** * Creates a stream that generates elements at fixed intervals after an initial delay. * * @param the type of elements generated by the stream * @param delayInMillis the initial delay in milliseconds before the first element is generated * @param intervalInMillis the interval in milliseconds between each element generation * @param s the function that generates the elements based on the current time in milliseconds * @return a stream that generates elements at the specified interval after the initial delay * @throws IllegalArgumentException if the function is null */ public static Stream interval(final long delayInMillis, final long intervalInMillis, final LongFunction s) { return interval(delayInMillis, intervalInMillis, TimeUnit.MILLISECONDS, s); } /** * Creates a stream that generates elements at fixed intervals after an initial delay. * * @param the type of elements generated by the stream * @param delay the initial delay before the first element is generated * @param interval the interval between each element generation * @param unit the time unit of the delay and interval * @param s the function that generates the elements based on the current time in the specified unit * @return a stream that generates elements at the specified interval after the initial delay * @throws IllegalArgumentException if the function is null * @see LongStream#interval(long, long, TimeUnit) */ public static Stream interval(final long delay, final long interval, final TimeUnit unit, final LongFunction s) throws IllegalArgumentException { N.checkArgNotNull(s); //noinspection resource return LongStream.interval(delay, interval, unit).mapToObj(s); } /** * Observes a blocking queue and creates a stream that polls elements from the queue in the specified duration. * *

     * <pre>
     * {@code
     * final BlockingQueue<String> queue = new ArrayBlockingQueue<>(32);
     * N.asyncExecute(() -> Stream.observe(queue, Duration.ofMillis(100)).filter(s -> s.startsWith("a")).forEach(Fn.println()));
     * N.asList("a", "b", "ab", "bc", "1", "a").forEach(queue::add);
     * N.sleep(10);
     * N.println("==================");
     * N.sleep(100);
     * N.println("==================");
     * N.sleep(10);
     * }
     * </pre>
* * @param the type of elements in the queue * @param queue the blocking queue to observe * @param duration the total time to observe the queue * @return a stream that polls elements from the queue at the specified interval * @throws IllegalArgumentException if the queue or duration is null */ @Beta public static Stream observe(final BlockingQueue queue, final Duration duration) throws IllegalArgumentException { N.checkArgNotNull(queue, cs.queue); N.checkArgNotNull(duration, cs.duration); final long now = System.currentTimeMillis(); final long endTime = duration.toMillis() >= Long.MAX_VALUE - now ? Long.MAX_VALUE : now + duration.toMillis(); final Iterator iter = new ObjIterator<>() { private T next = null; @Override public boolean hasNext() { if (next == null) { final long curTime = System.currentTimeMillis(); if (curTime <= endTime) { try { next = queue.poll(endTime - curTime, TimeUnit.MILLISECONDS); } catch (final InterruptedException e) { throw toRuntimeException(e); } } } return next != null; } @Override public T next() { if (!hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } final T res = next; next = null; return res; } }; return of(iter); } /** * Observes a blocking queue and creates a stream that polls elements from the queue at fixed intervals. * *
     * <pre>
     * {@code
     * final BlockingQueue<String> queue = new ArrayBlockingQueue<>(32);
     * final MutableBoolean hasMore = MutableBoolean.of(true);
     * N.asyncExecute(() -> Stream.observe(queue, () -> hasMore.value(), 10).filter(s -> s.startsWith("a")).forEach(Fn.println()));
     * N.asList("a", "b", "ab", "bc", "1", "a").forEach(queue::add);
     * N.println("==================");
     * hasMore.setFalse();
     * N.sleep(50);
     * N.println("==================");
     * }
     * </pre>
* * @param the type of elements in the queue * @param queue the blocking queue to observe * @param hasMore it will will be set to {@code true} if Stream is completed and the upstream should not continue to put elements to queue when it's completed. * This is an output parameter. * @param maxWaitIntervalInMillis the maximum wait interval in milliseconds between polling the queue * @return a stream that polls elements from the queue at the specified interval * @throws IllegalArgumentException if the queue or hasMore is {@code null}, or if maxWaitIntervalInMillis is negative */ @Beta public static Stream observe(final BlockingQueue queue, final BooleanSupplier hasMore, final long maxWaitIntervalInMillis) throws IllegalArgumentException { N.checkArgNotNull(queue, cs.queue); N.checkArgNotNull(hasMore, cs.hasMore); N.checkArgPositive(maxWaitIntervalInMillis, cs.maxWaitIntervalInMillis); final Iterator iter = new ObjIterator<>() { private T next = null; @Override public boolean hasNext() { if (next == null && (hasMore.getAsBoolean() || queue.size() > 0)) { try { do { next = queue.poll(maxWaitIntervalInMillis, TimeUnit.MILLISECONDS); } while (next == null && (hasMore.getAsBoolean() || queue.size() > 0)); } catch (final InterruptedException e) { throw toRuntimeException(e); } } return next != null; } @Override public T next() { if (!hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } final T res = next; next = null; return res; } }; return of(iter); } /** * Concatenates multiple arrays into a single stream. * * @param the type of elements in the arrays * @param a the arrays to be concatenated * @return a stream containing all the elements of the provided arrays */ @SafeVarargs public static Stream concat(final T[]... a) { if (N.isEmpty(a)) { return empty(); } return of(Iterators.concat(a)); } /** * Concatenates multiple iterables into a single stream. * * @param the type of elements in the iterables * @param a the iterables to be concatenated * @return a stream containing all the elements of the provided iterables */ @SafeVarargs public static Stream concat(final Iterable... a) { if (N.isEmpty(a)) { return empty(); } return of(Iterators.concat(a)); } /** * Concatenates multiple iterators into a single stream. * * @param the type of elements in the iterators * @param a the iterators to be concatenated * @return a stream containing all the elements of the provided iterators */ @SafeVarargs public static Stream concat(final Iterator... a) { if (N.isEmpty(a)) { return empty(); } return of(Iterators.concat(a)); } /** * Concatenates multiple streams into a single stream. * * @param the type of elements in the streams * @param a the streams to be concatenated * @return a stream containing all the elements of the provided streams */ @SafeVarargs public static Stream concat(final Stream... a) { if (N.isEmpty(a)) { return empty(); } return concat(Array.asList(a)); } /** * Concatenates multiple streams into a single stream. 
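     * <p>A usage sketch:
     * <pre>
     * {@code
     * final Collection<Stream<Integer>> streams = N.asList(Stream.of(1, 2), Stream.of(3, 4));
     * Stream.concat(streams).forEach(Fn.println()); // prints 1, 2, 3, 4
     * }
     * </pre>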
* * @param the type of elements in the streams * @param streams the collection of stream to be concatenated * @return a stream containing all the elements of the provided streams */ public static Stream concat(final Collection> streams) { if (N.isEmpty(streams)) { return empty(); } return of(new ObjIteratorEx() { private final Iterator> iterators = streams.iterator(); private Stream cur; private Iterator iter; @Override public boolean hasNext() { while ((iter == null || !iter.hasNext()) && iterators.hasNext()) { if (cur != null) { cur.close(); } cur = iterators.next(); iter = cur == null ? null : cur.iteratorEx(); } return iter != null && iter.hasNext(); } @Override public T next() { if ((iter == null || !iter.hasNext()) && !hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return iter.next(); } }).onClose(newCloseHandler(streams)); } /** * Concatenates multiple iterables into a single stream. * * @param the type of elements in the iterables * @param iterables the collection of iterable to be concatenated * @return a stream containing all the elements of the provided iterables */ @Beta public static Stream concatIterables(final Collection> iterables) { if (N.isEmpty(iterables)) { return empty(); } return of(new ObjIteratorEx<>() { private final Iterator> iterators = iterables.iterator(); private Iterable coll; private Iterator cur; @Override public boolean hasNext() { while ((cur == null || !cur.hasNext()) && iterators.hasNext()) { cur = (coll = iterators.next()) == null ? null : coll.iterator(); } return cur != null && cur.hasNext(); } @Override public T next() { if ((cur == null || !cur.hasNext()) && !hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return cur.next(); } }); } /** * Concatenates multiple iterators into a single stream. * * @param the type of elements in the iterators * @param c the collection of iterator to be concatenated * @return a stream containing all the elements of the provided iterators */ @Beta public static Stream concatIterators(final Collection> c) { if (N.isEmpty(c)) { return empty(); } return of(new ObjIteratorEx<>() { private final Iterator> iterators = c.iterator(); private Iterator cur; @Override public boolean hasNext() { while ((cur == null || !cur.hasNext()) && iterators.hasNext()) { cur = iterators.next(); } return cur != null && cur.hasNext(); } @Override public T next() { if ((cur == null || !cur.hasNext()) && !hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return cur.next(); } }); } // /** // * Returns a Stream with elements from a temporary queue which is filled by fetching elements from the specified iterables in parallel with multiple threads. // * // * @param the type of elements in the iterables // * @param a the iterables to be concatenated // * @return a parallel stream containing all the elements of the provided iterables // */ // @SafeVarargs // public static Stream parallelConcat(final Iterable... a) { // final int readThreadNum = DEFAULT_READING_THREAD_NUM; // return parallelConcat(a, readThreadNum, calculateBufferedSize(a.length, readThreadNum)); // } // // /** // * Returns a Stream with elements from a temporary queue which is filled by fetching elements from the specified iterables in parallel with multiple threads. // * // * @param the type of elements in the iterables // * @param a the iterables to be concatenated // * @param readThreadNum - count of threads used to read elements from iterator to queue. 
Default value is min(64, cup_cores) // * @param bufferSize the size of the buffer used to store elements from the iterators read by the reading threads. // * @return a parallel stream containing all the elements of the provided iterables // */ // public static Stream parallelConcat(final Iterable[] a, final int readThreadNum, final int bufferSize) { // if (N.isEmpty(a)) { // return empty(); // } // // return parallelConcatIterables(Array.asList(a), readThreadNum, bufferSize); // } /** * Returns a Stream with elements from a temporary queue which is filled by fetching elements from the specified iterators in parallel with multiple threads. * * @param the type of elements in the iterators * @param a the iterators to be concatenated * @return a parallel stream containing all the elements of the provided iterators */ @SafeVarargs public static Stream parallelConcat(final Iterator... a) { final int readThreadNum = DEFAULT_READING_THREAD_NUM; return parallelConcatIterators(Array.asList(a), readThreadNum, calculateBufferedSize(a.length, readThreadNum)); } // /** // * Returns a Stream with elements from a temporary queue which is filled by fetching elements from the specified iterators in parallel with multiple threads. // * // * @param the type of elements in the iterators // * @param a the iterators to be concatenated // * @param readThreadNum - count of threads used to read elements from iterator to queue. Default value is min(64, cup_cores) // * @param bufferSize the size of the buffer used to store elements from the iterators read by the reading threads. // * @return a parallel stream containing all the elements of the provided iterators // */ // public static Stream parallelConcat(final Iterator[] a, final int readThreadNum, final int bufferSize) { // if (N.isEmpty(a)) { // return empty(); // } // // return parallelConcatIterators(Array.asList(a), readThreadNum, bufferSize); // } /** * Returns a Stream with elements from a temporary queue which is filled by fetching elements from the specified streams in parallel with multiple threads. * * @param the type of elements in the streams * @param a the streams to be concatenated * @return a parallel stream containing all the elements of the provided streams */ @SafeVarargs public static Stream parallelConcat(final Stream... a) { final int readThreadNum = DEFAULT_READING_THREAD_NUM; return parallelConcat(Array.asList(a), readThreadNum, calculateBufferedSize(a.length, readThreadNum)); } // /** // * Returns a Stream with elements from a temporary queue which is filled by fetching elements from the specified streams in parallel with multiple threads. // * // * @param the type of elements in the streams // * @param a the streams to be concatenated // * @param readThreadNum - count of threads used to read elements from iterator to queue. Default value is min(64, cup_cores) // * @param bufferSize the size of the buffer used to store elements from the iterators read by the reading threads. // * @return a parallel stream containing all the elements of the provided streams // */ // public static Stream parallelConcat(final Stream[] a, final int readThreadNum, final int bufferSize) { // if (N.isEmpty(a)) { // return empty(); // } // // return parallelConcat(Array.asList(a), readThreadNum, bufferSize); // } /** * Returns a Stream with elements from a temporary queue which is filled by fetching elements from the specified streams in parallel with multiple threads. 
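     * <p>A usage sketch (elements from the different source streams may be interleaved in any order):
     * <pre>
     * {@code
     * final Collection<Stream<Integer>> streams = N.asList(Stream.of(1, 2), Stream.of(3, 4));
     * Stream.parallelConcat(streams).forEach(Fn.println()); // all four elements, possibly interleaved
     * }
     * </pre>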
* * @param the type of elements in the streams * @param streams the collection of stream to be concatenated * @return a parallel stream containing all the elements of the provided streams */ public static Stream parallelConcat(final Collection> streams) { return parallelConcat(streams, DEFAULT_READING_THREAD_NUM); } /** * Returns a Stream with elements from a temporary queue which is filled by fetching elements from the specified streams in parallel with multiple threads. * * @param the type of elements in the streams * @param streams the collection of stream to be concatenated * @param readThreadNum - count of threads used to read elements from iterator to queue. Default value is min(64, cup_cores) * @return a parallel stream containing all the elements of the provided streams */ public static Stream parallelConcat(final Collection> streams, final int readThreadNum) { return parallelConcat(streams, readThreadNum, calculateBufferedSize(streams.size(), readThreadNum)); } /** * Returns a Stream with elements from a temporary queue which is filled by fetching elements from the specified streams in parallel with multiple threads. * * @param the type of elements in the streams * @param streams the collection of stream to be concatenated * @param readThreadNum - count of threads used to read elements from iterator to queue. Default value is min(64, cup_cores) * @param bufferSize the size of the buffer used to store elements from the iterators read by the reading threads. * @return a parallel stream containing all the elements of the provided streams */ public static Stream parallelConcat(final Collection> streams, final int readThreadNum, final int bufferSize) { if (N.isEmpty(streams)) { return Stream.empty(); } final MutableBoolean onGoing = MutableBoolean.of(true); final int threadNum = Math.min(streams.size(), readThreadNum); final List> futureList = new ArrayList<>(threadNum); final Holder holderForAsyncExecutorUsed = new Holder<>(); final Supplier> supplier = () -> { final ArrayBlockingQueue queue = new ArrayBlockingQueue<>(bufferSize); final Holder eHolder = new Holder<>(); final MutableBoolean disposableChecked = MutableBoolean.of(false); final Iterator> iterators = streams.iterator(); final AtomicInteger threadCounter = new AtomicInteger(threadNum); boolean noException = false; AsyncExecutor asyncExecutorToUse = checkAsyncExecutor(DEFAULT_ASYNC_EXECUTOR, threadNum, 0); // TODO Warning: Dead lock could happen if the total thread number started by this stream and its upstream is bigger than StreamBase.CORE_THREAD_POOL_SIZE(1024). // If the total thread number started by this stream and its down stream is big, please specified its owner {@code Executor} by {@code parallel(..., Executor)}. // UPDATE: this dead lock problem has been resolved by using BaseStream.execute(...) try { for (int i = 0; i < threadNum; i++) { asyncExecutorToUse = execute(asyncExecutorToUse, threadNum, 0, i, futureList, () -> { try { Stream s = null; Iterator iter = null; while (onGoing.value()) { synchronized (iterators) { if (iterators.hasNext()) { s = iterators.next(); iter = s == null ? 
ObjIterator.empty() : iterate(s); } else { break; } } T next = null; while (onGoing.value() && iter.hasNext()) { next = iter.next(); if (next == null) { next = (T) NONE; } else if (disposableChecked.isFalse()) { disposableChecked.setTrue(); if (next instanceof NoCachingNoUpdating) { throw new IllegalStateException("Can't run NoCachingNoUpdating Objects in parallel Stream or Queue"); } } if (!queue.offer(next)) { // int cnt = 0; while (onGoing.value()) { if (queue.offer(next, MAX_WAIT_TIME_FOR_QUEUE_OFFER, TimeUnit.MILLISECONDS)) { break; } // cnt++; // // if ((cnt % 128) == 0) { // if (logger.isWarnEnabled()) { // logger.warn("Has been waiting for " + cnt * MAX_WAIT_TIME_FOR_QUEUE_OFFER // + " milliseconds to add next element to queue. Maybe dead lock. Please refer to java doc for Stream.parallel(...) to avoid potential dead lock"); // } // } // if (MAX_WAIT_TIME_FOR_QUEUE_OFFER * cnt >= MAX_WAIT_TO_BREAK_FOR_DEAD_LOCK) { // throw new RuntimeException("Wait too long(" + MAX_WAIT_TIME_FOR_QUEUE_OFFER * cnt // + " milliseconds) to add next element to queue. Break for potential dead lock"); // } } } } if (s != null) { s.close(); } } } catch (final Exception e) { setError(eHolder, e, onGoing); } finally { threadCounter.decrementAndGet(); } }); } noException = true; } finally { if (!noException) { onGoing.setFalse(); } } holderForAsyncExecutorUsed.setValue(asyncExecutorToUse); return new BufferedIterator<>(bufferSize) { T next = null; @Override public boolean hasNext() { try { if (next == null && (next = queue.poll()) == null) { // int cnt = 0; while (onGoing.value() && (threadCounter.get() > 0 || queue.size() > 0)) { // (queue.size() > 0 || counter.get() > 0) is wrong. has to check counter first if ((next = queue.poll(MAX_WAIT_TIME_FOR_QUEUE_POLL, TimeUnit.MILLISECONDS)) != null) { break; } // cnt++; // // if ((cnt % 128) == 0) { // if (logger.isWarnEnabled()) { // logger.warn("Has been waiting for " + cnt * MAX_WAIT_TIME_FOR_QUEUE_POLL // + " milliseconds to poll next element from queue. Maybe dead lock. Please refer to java doc for Stream.parallel(...) to avoid potential dead lock"); // } // } // if (MAX_WAIT_TIME_FOR_QUEUE_POLL * cnt >= MAX_WAIT_TO_BREAK_FOR_DEAD_LOCK) { // throw new RuntimeException("Wait too long(" + MAX_WAIT_TIME_FOR_QUEUE_POLL * cnt // + " milliseconds) to poll next element from queue. Break for potential dead lock"); // } } } } catch (final Exception e) { setError(eHolder, e, onGoing); } if (eHolder.value() != null) { setStopFlagAndThrowException(eHolder, onGoing); } return next != null; } @Override public T next() { if (next == null && !hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } final T result = next == NONE ? null : next; next = null; return result; } }; }; //noinspection resource return just(supplier).flatMap(it -> Stream.of(it.get())).onClose(newCloseHandler(streams)).onClose(() -> { onGoing.setFalse(); if (holderForAsyncExecutorUsed.isNotNull()) { shutdownTempExecutor(holderForAsyncExecutorUsed.value()); } // cancelAll(futureList); }); } // /** // * Returns a Stream with elements from a temporary queue which is filled by fetching elements from the specified iterables in parallel with multiple threads. 
// * // * @param the type of elements in the iterables // * @param iterables the collection of iterable to be concatenated // * @return a parallel stream containing all the elements of the provided iterables // */ // @Beta // public static Stream parallelConcatIterables(final Collection> iterables) { // return parallelConcatIterables(iterables, DEFAULT_READING_THREAD_NUM); // } // // /** // * Returns a Stream with elements from a temporary queue which is filled by fetching elements from the specified iterables in parallel with multiple threads. // * // * @param the type of elements in the iterables // * @param iterables the collection of iterable to be concatenated // * @param readThreadNum - count of threads used to read elements from iterator to queue. Default value is min(64, cup_cores) // * @return a parallel stream containing all the elements of the provided iterables // */ // @Beta // public static Stream parallelConcatIterables(final Collection> iterables, final int readThreadNum) { // return parallelConcatIterables(iterables, readThreadNum, calculateBufferedSize(iterables.size(), readThreadNum)); // } // // /** // * Returns a Stream with elements from a temporary queue which is filled by fetching elements from the specified iterables in parallel with multiple threads. // * // * @param the type of elements in the iterables // * @param iterables the collection of iterable to be concatenated // * @param readThreadNum - count of threads used to read elements from iterator to queue. Default value is min(64, cup_cores) // * @param bufferSize the size of the buffer used to store elements from the iterators read by the reading threads. // * @return a parallel stream containing all the elements of the provided iterables // */ // @Beta // public static Stream parallelConcatIterables(final Collection> iterables, final int readThreadNum, // final int bufferSize) { // if (N.isEmpty(iterables)) { // return Stream.empty(); // } // // return parallelConcatIterators(Stream.of(iterables).skipNulls().map(Iterable::iterator).toList(), readThreadNum, bufferSize); // } /** * Returns a Stream with elements from a temporary queue which is filled by fetching elements from the specified iterators in parallel with multiple threads. * * @param the type of elements in the iterators * @param iterators the collection of iterator to be concatenated * @return a parallel stream containing all the elements of the provided iterators */ public static Stream parallelConcatIterators(final Collection> iterators) { return parallelConcatIterators(iterators, DEFAULT_READING_THREAD_NUM); } /** * Returns a Stream with elements from a temporary queue which is filled by fetching elements from the specified iterators in parallel with multiple threads. * * @param the type of elements in the iterators * @param iterators the collection of iterator to be concatenated * @param readThreadNum - count of threads used to read elements from iterator to queue. Default value is min(64, cup_cores) * @return a parallel stream containing all the elements of the provided iterators */ public static Stream parallelConcatIterators(final Collection> iterators, final int readThreadNum) { return parallelConcatIterators(iterators, readThreadNum, 0, false, null); } /** * Returns a Stream with elements from a temporary queue which is filled by fetching elements from the specified iterators in parallel with multiple threads. 
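     *
     * <p>A minimal usage sketch (illustrative only; {@code listA}, {@code listB} and {@code listC}
     * are assumed to be existing {@code List<String>} instances, and the thread count and buffer
     * size below are arbitrary example values):</p>
     * <pre>{@code
     * List<Iterator<String>> sources = N.asList(listA.iterator(), listB.iterator(), listC.iterator());
     * // Drain the three iterators with up to 3 reading threads into a buffer of 64 elements.
     * Stream<String> merged = Stream.parallelConcatIterators(sources, 3, 64);
     * }</pre>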
* * @param the type of elements in the iterators * @param iterators the collection of iterator to be concatenated * @param readThreadNum - count of threads used to read elements from iterator to queue. Default value is min(64, cup_cores) * @param bufferSize the size of the buffer used to store elements from the iterators read by the reading threads. * @return a parallel stream containing all the elements of the provided iterators */ public static Stream parallelConcatIterators(final Collection> iterators, final int readThreadNum, final int bufferSize) { if (N.isEmpty(iterators)) { return Stream.empty(); } final ArrayBlockingQueue queue = new ArrayBlockingQueue<>(bufferSize); return parallelConcatIterators(iterators, readThreadNum, queue); } static Stream parallelConcatIterators(final Collection> iterators, final int readThreadNum, final ArrayBlockingQueue queue) { if (N.isEmpty(iterators)) { return Stream.empty(); } return parallelConcatIterators(iterators, readThreadNum, 0, queue, false, null); } static Stream parallelConcatIterators(final Collection> iterators, final int readThreadNum, final int executorNumForVirtualThread, final boolean cancelUncompletedThreads, final AsyncExecutor asyncExecutor) { return parallelConcatIterators(iterators, readThreadNum, executorNumForVirtualThread, calculateBufferedSize(iterators.size(), readThreadNum), cancelUncompletedThreads, asyncExecutor); } static Stream parallelConcatIterators(final Collection> iterators, final int readThreadNum, final int executorNumForVirtualThread, final int bufferSize, final boolean cancelUncompletedThreads, final AsyncExecutor asyncExecutor) { if (N.isEmpty(iterators)) { return Stream.empty(); } final ArrayBlockingQueue queue = new ArrayBlockingQueue<>(bufferSize); return parallelConcatIterators(iterators, readThreadNum, executorNumForVirtualThread, queue, cancelUncompletedThreads, asyncExecutor); } static Stream parallelConcatIterators(final Collection> iterators, final int readThreadNum, final int executorNumForVirtualThread, final ArrayBlockingQueue queue, final boolean cancelUncompletedThreads, final AsyncExecutor asyncExecutor) { if (N.isEmpty(iterators)) { return Stream.empty(); } final Supplier> supplier = () -> buffered(iterators, readThreadNum, executorNumForVirtualThread, queue, cancelUncompletedThreads, asyncExecutor, null); //noinspection resource return just(supplier).map(Supplier::get).flatMap(it -> Stream.of(it).onClose(it::close)); } static BufferedIterator buffered(final Iterator iter) { if (iter instanceof BufferedIterator) { return (BufferedIterator) iter; } return buffered(iter, new ArrayBlockingQueue<>(DEFAULT_BUFFERED_SIZE_PER_ITERATOR)); } static BufferedIterator buffered(final Iterator iter, final BlockingQueue queue) { return buffered(iter, queue, null); } static BufferedIterator buffered(final Iterator iter, final BlockingQueue queue, final MutableBoolean hasMore) { return buffered(N.asList(iter), 1, 0, queue, false, null, hasMore); } static BufferedIterator buffered(final Collection> iterators, final int readThreadNum, final int executorNumForVirtualThread, final BlockingQueue queue, final boolean cancelUncompletedThreads, final AsyncExecutor asyncExecutor, final MutableBoolean hasMore) { if (N.isEmpty(iterators)) { return BufferedIterator.empty(); } final int bufferSize = queue.remainingCapacity(); final MutableBoolean onGoing = MutableBoolean.of(true); final int threadNum = Math.min(iterators.size(), readThreadNum); final Holder holderForAsyncExecutorUsed = new Holder<>(); final T none = (T) 
NONE; final Holder eHolder = new Holder<>(); final MutableBoolean disposableChecked = MutableBoolean.of(false); final List> futureList = cancelUncompletedThreads ? new ArrayList<>(threadNum) : null; final Iterator> iters = iterators.iterator(); final AtomicInteger threadCounter = new AtomicInteger(threadNum); boolean noException = false; AsyncExecutor asyncExecutorToUse = checkAsyncExecutor(asyncExecutor, threadNum, executorNumForVirtualThread); // TODO Warning: Dead lock could happen if the total thread number started by this stream and its upstream is bigger than StreamBase.CORE_THREAD_POOL_SIZE(1024). // If the total thread number started by this stream and its down stream is big, please specified its owner {@code Executor} by {@code parallel(..., Executor)}. // UPDATE: this dead lock problem has been resolved by using BaseStream.execute(...) try { for (int i = 0; i < threadNum; i++) { asyncExecutorToUse = execute(asyncExecutorToUse, threadNum, executorNumForVirtualThread, i, futureList, () -> { try { while (onGoing.value()) { Iterator iter = null; synchronized (iters) { if (iters.hasNext()) { iter = iters.next(); } else { break; } } T next = null; while (onGoing.value() && iter.hasNext()) { next = iter.next(); if (next == null) { next = none; } else if (disposableChecked.isFalse()) { disposableChecked.setTrue(); if (next instanceof NoCachingNoUpdating) { throw new IllegalStateException("Can't run NoCachingNoUpdating Objects in parallel Stream or Queue"); } } if (!queue.offer(next)) { // int cnt = 0; while (onGoing.value()) { if (queue.offer(next, MAX_WAIT_TIME_FOR_QUEUE_OFFER, TimeUnit.MILLISECONDS)) { break; } // cnt++; // // if ((cnt % 128) == 0) { // if (logger.isWarnEnabled()) { // logger.warn("Has been waiting for " + cnt * MAX_WAIT_TIME_FOR_QUEUE_OFFER // + " milliseconds to add next element to queue. Maybe dead lock. Please refer to java doc for Stream.parallel(...) to avoid potential dead lock"); // } // } // if (MAX_WAIT_TIME_FOR_QUEUE_OFFER * cnt >= MAX_WAIT_TO_BREAK_FOR_DEAD_LOCK) { // throw new RuntimeException("Wait too long(" + MAX_WAIT_TIME_FOR_QUEUE_OFFER * cnt // + " milliseconds) to add next element to queue. 
Break for potential dead lock"); // } } } } } } catch (final Exception e) { setError(eHolder, e, onGoing); } finally { if ((threadCounter.decrementAndGet() == 0) && (hasMore != null)) { hasMore.setFalse(); } } }); } noException = true; } finally { if (!noException) { onGoing.setFalse(); if (hasMore != null) { hasMore.setFalse(); } } } holderForAsyncExecutorUsed.setValue(asyncExecutorToUse); return new BufferedIterator<>(bufferSize) { private boolean isClosed = false; private T next = null; @Override public boolean hasNext() { try { if (next == null && (next = queue.poll()) == null) { // int cnt = 0; // "ForkJoinPool-1-worker-7" #105 [15944] daemon prio=5 os_prio=0 cpu=0.00ms elapsed=230.72s tid=0x00000296646f59a0 [0x000000c42adfe000] // Carrying virtual thread #203 // at jdk.internal.vm.Continuation.run(java.base@21/Continuation.java:248) // at java.lang.VirtualThread.runContinuation(java.base@21/VirtualThread.java:223) // at java.lang.VirtualThread$$Lambda/0x000002961f346de8.run(java.base@21/Unknown Source) // at java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(java.base@21/ForkJoinTask.java:1423) // at java.util.concurrent.ForkJoinTask.doExec(java.base@21/ForkJoinTask.java:387) // at java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(java.base@21/ForkJoinPool.java:1312) // at java.util.concurrent.ForkJoinPool.scan(java.base@21/ForkJoinPool.java:1843) // at java.util.concurrent.ForkJoinPool.runWorker(java.base@21/ForkJoinPool.java:1808) // at java.util.concurrent.ForkJoinWorkerThread.run(java.base@21/ForkJoinWorkerThread.java:188) // // Locked ownable synchronizers: // - None // // "ForkJoinPool-1-worker-8" #111 [15056] daemon prio=5 os_prio=0 cpu=0.00ms elapsed=230.72s tid=0x00000296646f7a70 [0x000000c42aefe000] // Carrying virtual thread #202 // at jdk.internal.vm.Continuation.run(java.base@21/Continuation.java:248) // at java.lang.VirtualThread.runContinuation(java.base@21/VirtualThread.java:223) // at java.lang.VirtualThread$$Lambda/0x000002961f346de8.run(java.base@21/Unknown Source) // at java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(java.base@21/ForkJoinTask.java:1423) // at java.util.concurrent.ForkJoinTask.doExec(java.base@21/ForkJoinTask.java:387) // at java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(java.base@21/ForkJoinPool.java:1312) // at java.util.concurrent.ForkJoinPool.scan(java.base@21/ForkJoinPool.java:1843) // at java.util.concurrent.ForkJoinPool.runWorker(java.base@21/ForkJoinPool.java:1808) // at java.util.concurrent.ForkJoinWorkerThread.run(java.base@21/ForkJoinWorkerThread.java:188) // // Locked ownable synchronizers: // - None // // "VirtualThread-unparker" #234 [19904] daemon prio=5 os_prio=0 cpu=0.00ms elapsed=230.72s tid=0x00000296646f6030 nid=19904 waiting on condition [0x000000c42affe000] // java.lang.Thread.State: WAITING (parking) // at jdk.internal.misc.Unsafe.park(java.base@21/Native Method) // - parking to wait for <0x0000000685c5cfc0> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject) // at java.util.concurrent.locks.LockSupport.park(java.base@21/LockSupport.java:371) // at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(java.base@21/AbstractQueuedSynchronizer.java:519) // at java.util.concurrent.ForkJoinPool.unmanagedBlock(java.base@21/ForkJoinPool.java:3780) // at java.util.concurrent.ForkJoinPool.managedBlock(java.base@21/ForkJoinPool.java:3725) // at 
java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(java.base@21/AbstractQueuedSynchronizer.java:1707) // at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(java.base@21/ScheduledThreadPoolExecutor.java:1170) // at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(java.base@21/ScheduledThreadPoolExecutor.java:899) // at java.util.concurrent.ThreadPoolExecutor.getTask(java.base@21/ThreadPoolExecutor.java:1070) // at java.util.concurrent.ThreadPoolExecutor.runWorker(java.base@21/ThreadPoolExecutor.java:1130) // at java.util.concurrent.ThreadPoolExecutor$Worker.run(java.base@21/ThreadPoolExecutor.java:642) // at java.lang.Thread.runWith(java.base@21/Thread.java:1596) // at java.lang.Thread.run(java.base@21/Thread.java:1583) // at jdk.internal.misc.InnocuousThread.run(java.base@21/InnocuousThread.java:186) // Dead lock or endless loop here for virtual thread executor. // Maybe the root cause is ForkJoinPool is used for virtual thread execution. // Test: StreamTest.test_virtual_threads while (onGoing.value() && (threadCounter.get() > 0 || queue.size() > 0)) { // (queue.size() > 0 || counter.get() > 0) is wrong. has to check counter first if ((next = queue.poll(MAX_WAIT_TIME_FOR_QUEUE_POLL, TimeUnit.MILLISECONDS)) != null) { break; } // cnt++; // // if ((cnt % 128) == 0) { // if (logger.isWarnEnabled()) { // logger.warn("Has been waiting for " + cnt * MAX_WAIT_TIME_FOR_QUEUE_POLL // + " milliseconds to poll next element from queue. Maybe dead lock. Please refer to java doc for Stream.parallel(...) to avoid potential dead lock"); // } // } // if (MAX_WAIT_TIME_FOR_QUEUE_POLL * cnt >= MAX_WAIT_TO_BREAK_FOR_DEAD_LOCK) { // throw new RuntimeException("Wait too long(" + MAX_WAIT_TIME_FOR_QUEUE_POLL * cnt // + " milliseconds) to poll next element from queue. Break for potential dead lock"); // } } } } catch (final Exception e) { setError(eHolder, e, onGoing); } if (eHolder.value() != null) { setStopFlagAndThrowException(eHolder, onGoing); } return next != null; } @Override public T next() { if (next == null && !hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } final T result = next == none ? null : next; next = null; return result; } @Override public void close() { if (isClosed) { return; } isClosed = true; onGoing.setFalse(); try { if (cancelUncompletedThreads) { // this will always be false? // cancelAll(futureList); // TODO canceling the task will impact StreamBase.activeThreadNum? It has been fixed now - 20220815 } } finally { if (holderForAsyncExecutorUsed.isNotNull()) { final AsyncExecutor asyncExecutorToUse = holderForAsyncExecutorUsed.value(); shutdownTempExecutor(asyncExecutorToUse, asyncExecutor); } } } }; } /** * Zips two arrays of characters into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first array of characters * @param b the second array of characters * @param zipFunction the function to combine pairs of values from the arrays * @return a stream of combined values */ public static Stream zip(final char[] a, final char[] b, final CharBiFunction zipFunction) { return zip(CharIteratorEx.of(a), CharIteratorEx.of(b), zipFunction); } /** * Zips three arrays of characters into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. 
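     *
     * <p>A minimal usage sketch (illustrative only; the values are made up). Zipping stops as soon
     * as the shortest array is exhausted:</p>
     * <pre>{@code
     * char[] a = {'a', 'b', 'c'};
     * char[] b = {'1', '2'};
     * char[] c = {'x', 'y', 'z'};
     * // Produces "a1x" and "b2y"; zipping stops once b is exhausted.
     * Stream<String> zipped = Stream.zip(a, b, c, (cA, cB, cC) -> "" + cA + cB + cC);
     * }</pre>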
* * @param the type of the result * @param a the first array of characters * @param b the second array of characters * @param c the third array of characters * @param zipFunction the function to combine triples of values from the arrays * @return a stream of combined values */ public static Stream zip(final char[] a, final char[] b, final char[] c, final CharTriFunction zipFunction) { return zip(CharIteratorEx.of(a), CharIteratorEx.of(b), CharIteratorEx.of(c), zipFunction); } /** * Zips two character iterators into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first character iterator * @param b the second character iterator * @param zipFunction the function to combine pairs of values from the iterators * @return a stream of combined values */ public static Stream zip(final CharIterator a, final CharIterator b, final CharBiFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final CharIterator iterA = a == null ? CharIterator.empty() : a; private final CharIterator iterB = b == null ? CharIterator.empty() : b; @Override public boolean hasNext() { return iterA.hasNext() && iterB.hasNext(); } @Override public R next() { return zipFunction.apply(iterA.nextChar(), iterB.nextChar()); } }); } /** * Zips three character iterators into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first character iterator * @param b the second character iterator * @param c the third character iterator * @param zipFunction the function to combine triples of values from the iterators * @return a stream of combined values */ public static Stream zip(final CharIterator a, final CharIterator b, final CharIterator c, final CharTriFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final CharIterator iterA = a == null ? CharIterator.empty() : a; private final CharIterator iterB = b == null ? CharIterator.empty() : b; private final CharIterator iterC = c == null ? CharIterator.empty() : c; @Override public boolean hasNext() { return iterA.hasNext() && iterB.hasNext() && iterC.hasNext(); } @Override public R next() { return zipFunction.apply(iterA.nextChar(), iterB.nextChar(), iterC.nextChar()); } }); } /** * Zips two character streams into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first character stream * @param b the second character stream * @param zipFunction the function to combine pairs of values from the streams * @return a stream of combined values */ public static Stream zip(final CharStream a, final CharStream b, final CharBiFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), zipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three character streams into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. 
* * @param the type of the result * @param a the first character stream * @param b the second character stream * @param c the third character stream * @param zipFunction the function to combine triples of values from the streams * @return a stream of combined values */ public static Stream zip(final CharStream a, final CharStream b, final CharStream c, final CharTriFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), iterate(c), zipFunction)).onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips a collection of character streams into a single stream until one of them runs out of values. * Each set of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param c the collection of character streams * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final Collection c, final CharNFunction zipFunction) { if (N.isEmpty(c)) { return Stream.empty(); } final int len = c.size(); final CharIterator[] iters = new CharIterator[len]; int i = 0; for (final CharStream s : c) { iters[i++] = iterate(s); } return new IteratorStream<>(new ObjIteratorEx() { @Override public boolean hasNext() { for (int i = 0; i < len; i++) { if (!iters[i].hasNext()) { return false; } } return true; } @Override public R next() { final char[] args = new char[len]; for (int i = 0; i < len; i++) { args[i] = iters[i].nextChar(); } return zipFunction.apply(args); } }).onClose(newCloseHandler(c)); } /** * Zips two character arrays into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one array runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. * * @param the type of the result * @param a the first character array * @param b the second character array * @param valueForNoneA the value to use if the first array runs out of values * @param valueForNoneB the value to use if the second array runs out of values * @param zipFunction the function to combine pairs of values from the arrays * @return a stream of combined values */ public static Stream zip(final char[] a, final char[] b, final char valueForNoneA, final char valueForNoneB, final CharBiFunction zipFunction) { return zip(CharIteratorEx.of(a), CharIteratorEx.of(b), valueForNoneA, valueForNoneB, zipFunction); } /** * Zips three character arrays into a single stream until all of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * If one array runs out of values before the other, the specified valueForNoneA, valueForNoneB or valueForNoneC is used. 
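     *
     * <p>A minimal usage sketch (illustrative only; the values are made up). Exhausted arrays are
     * padded with the supplied defaults until the longest array is consumed:</p>
     * <pre>{@code
     * char[] a = {'a', 'b', 'c'};
     * char[] b = {'1'};
     * char[] c = {'x', 'y'};
     * // Produces "a1x", "b_y" and "c__"; '_' stands in for the exhausted arrays.
     * Stream<String> zipped = Stream.zip(a, b, c, '_', '_', '_', (cA, cB, cC) -> "" + cA + cB + cC);
     * }</pre>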
* * @param the type of the result * @param a the first character array * @param b the second character array * @param c the third character array * @param valueForNoneA the value to use if the first array runs out of values * @param valueForNoneB the value to use if the second array runs out of values * @param valueForNoneC the value to use if the third array runs out of values * @param zipFunction the function to combine triples of values from the arrays * @return a stream of combined values */ public static Stream zip(final char[] a, final char[] b, final char[] c, final char valueForNoneA, final char valueForNoneB, final char valueForNoneC, final CharTriFunction zipFunction) { return zip(CharIteratorEx.of(a), CharIteratorEx.of(b), CharIteratorEx.of(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction); } /** * Zips two character iterators into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. * * @param the type of the result * @param a the first character iterator * @param b the second character iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param zipFunction the function to combine pairs of values from the iterators * @return a stream of combined values */ public static Stream zip(final CharIterator a, final CharIterator b, final char valueForNoneA, final char valueForNoneB, final CharBiFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final CharIterator iterA = a == null ? CharIterator.empty() : a; private final CharIterator iterB = b == null ? CharIterator.empty() : b; @Override public boolean hasNext() { return iterA.hasNext() || iterB.hasNext(); } @Override public R next() { if (iterA.hasNext()) { return zipFunction.apply(iterA.nextChar(), iterB.hasNext() ? iterB.nextChar() : valueForNoneB); } else { return zipFunction.apply(valueForNoneA, iterB.nextChar()); } } }); } /** * Zips three character iterators into a single stream until all of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the other, the specified valueForNoneA, valueForNoneB or valueForNoneC is used. * * @param the type of the result * @param a the first character iterator * @param b the second character iterator * @param c the third character iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param valueForNoneC the value to use if the third iterator runs out of values * @param zipFunction the function to combine triples of values from the iterators * @return a stream of combined values */ public static Stream zip(final CharIterator a, final CharIterator b, final CharIterator c, final char valueForNoneA, final char valueForNoneB, final char valueForNoneC, final CharTriFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final CharIterator iterA = a == null ? CharIterator.empty() : a; private final CharIterator iterB = b == null ? CharIterator.empty() : b; private final CharIterator iterC = c == null ? 
CharIterator.empty() : c; @Override public boolean hasNext() { return iterA.hasNext() || iterB.hasNext() || iterC.hasNext(); } @Override public R next() { if (iterA.hasNext()) { return zipFunction.apply(iterA.nextChar(), iterB.hasNext() ? iterB.nextChar() : valueForNoneB, iterC.hasNext() ? iterC.nextChar() : valueForNoneC); } else if (iterB.hasNext()) { return zipFunction.apply(valueForNoneA, iterB.nextChar(), iterC.hasNext() ? iterC.nextChar() : valueForNoneC); } else { return zipFunction.apply(valueForNoneA, valueForNoneB, iterC.nextChar()); } } }); } /** * Zips two character streams into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. * * @param the type of the result * @param a the first character stream * @param b the second character stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param zipFunction the function to combine pairs of values from the streams * @return a stream of combined values */ public static Stream zip(final CharStream a, final CharStream b, final char valueForNoneA, final char valueForNoneB, final CharBiFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), valueForNoneA, valueForNoneB, zipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three character streams into a single stream until all of them run out of values. * Each set of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the others, the specified valueForNoneA, valueForNoneB, or valueForNoneC is used. * * @param the type of the result * @param a the first character stream * @param b the second character stream * @param c the third character stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param valueForNoneC the value to use if the third stream runs out of values * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final CharStream a, final CharStream b, final CharStream c, final char valueForNoneA, final char valueForNoneB, final char valueForNoneC, final CharTriFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), iterate(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction)) .onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips a collection of character streams into a single stream until all of them runs out of values. * Each set of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the others, the specified valuesForNone are used. 
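     *
     * <p>A minimal usage sketch (illustrative only; the values are made up). {@code valuesForNone}
     * must contain one default per stream, in the same order as the collection:</p>
     * <pre>{@code
     * List<CharStream> streams = N.asList(CharStream.of('a', 'b', 'c'), CharStream.of('1'));
     * // Produces "a1", "b?" and "c?"; the exhausted second stream contributes '?'.
     * Stream<String> zipped = Stream.zip(streams, new char[] { '?', '?' }, args -> new String(args));
     * }</pre>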
* * @param the type of the result * @param c the collection of character streams * @param valuesForNone the values to use if the streams run out of values * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final Collection c, final char[] valuesForNone, final CharNFunction zipFunction) { if (N.isEmpty(c)) { return Stream.empty(); } final int len = c.size(); if (len != valuesForNone.length) { throw new IllegalArgumentException("The size of 'valuesForNone' must be same as the size of the collection of iterator"); } final CharStream[] ss = c.toArray(new CharStream[len]); final CharIterator[] iters = new CharIterator[len]; for (int i = 0; i < len; i++) { iters[i] = iterate(ss[i]); } return new IteratorStream<>(new ObjIteratorEx() { @Override public boolean hasNext() { for (int i = 0; i < len; i++) { if (iters[i] != null) { if (iters[i].hasNext()) { return true; } else if (iters[i] != null) { iters[i] = null; ss[i].close(); } } } return false; } @Override public R next() { final char[] args = new char[len]; boolean hasNext = false; for (int i = 0; i < len; i++) { if (iters[i] != null && iters[i].hasNext()) { hasNext = true; args[i] = iters[i].nextChar(); } else { args[i] = valuesForNone[i]; } } if (!hasNext) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return zipFunction.apply(args); } }).onClose(newCloseHandler(c)); } /** * Zips two byte arrays into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first byte array * @param b the second byte array * @param zipFunction the function to combine pairs of values from the arrays * @return a stream of combined values */ public static Stream zip(final byte[] a, final byte[] b, final ByteBiFunction zipFunction) { return zip(ByteIteratorEx.of(a), ByteIteratorEx.of(b), zipFunction); } /** * Zips three byte arrays into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first byte array * @param b the second byte array * @param c the third byte array * @param zipFunction the function to combine triples of values from the arrays * @return a stream of combined values */ public static Stream zip(final byte[] a, final byte[] b, final byte[] c, final ByteTriFunction zipFunction) { return zip(ByteIteratorEx.of(a), ByteIteratorEx.of(b), ByteIteratorEx.of(c), zipFunction); } /** * Zips two byte iterators into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first byte iterator * @param b the second byte iterator * @param zipFunction the function to combine pairs of values from the iterators * @return a stream of combined values */ public static Stream zip(final ByteIterator a, final ByteIterator b, final ByteBiFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final ByteIterator iterA = a == null ? ByteIterator.empty() : a; private final ByteIterator iterB = b == null ? 
ByteIterator.empty() : b; @Override public boolean hasNext() { return iterA.hasNext() && iterB.hasNext(); } @Override public R next() { return zipFunction.apply(iterA.nextByte(), iterB.nextByte()); } }); } /** * Zips three byte iterators into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first byte iterator * @param b the second byte iterator * @param c the third byte iterator * @param zipFunction the function to combine triples of values from the iterators * @return a stream of combined values */ public static Stream zip(final ByteIterator a, final ByteIterator b, final ByteIterator c, final ByteTriFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final ByteIterator iterA = a == null ? ByteIterator.empty() : a; private final ByteIterator iterB = b == null ? ByteIterator.empty() : b; private final ByteIterator iterC = c == null ? ByteIterator.empty() : c; @Override public boolean hasNext() { return iterA.hasNext() && iterB.hasNext() && iterC.hasNext(); } @Override public R next() { return zipFunction.apply(iterA.nextByte(), iterB.nextByte(), iterC.nextByte()); } }); } /** * Zips two byte streams into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first byte stream * @param b the second byte stream * @param zipFunction the function to combine pairs of values from the streams * @return a stream of combined */ public static Stream zip(final ByteStream a, final ByteStream b, final ByteBiFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), zipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three byte streams into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first byte stream * @param b the second byte stream * @param c the third byte stream * @param zipFunction the function to combine triples of values from the streams * @return a stream of combined values */ public static Stream zip(final ByteStream a, final ByteStream b, final ByteStream c, final ByteTriFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), iterate(c), zipFunction)).onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips a collection of byte streams into a single stream until one of them runs out of values. * Each set of values is combined into a single value using the supplied zipFunction. 
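     *
     * <p>A minimal usage sketch (illustrative only; the values are made up). Zipping stops as soon
     * as any of the streams is exhausted:</p>
     * <pre>{@code
     * List<ByteStream> streams = N.asList(ByteStream.of((byte) 1, (byte) 2, (byte) 3), ByteStream.of((byte) 10, (byte) 20));
     * // Produces 11 and 22; the trailing 3 of the first stream is never consumed.
     * Stream<Integer> sums = Stream.zip(streams, args -> {
     *     int sum = 0;
     *     for (byte e : args) {
     *         sum += e;
     *     }
     *     return sum;
     * });
     * }</pre>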
* * @param the type of the result * @param c the collection of byte streams * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final Collection c, final ByteNFunction zipFunction) { if (N.isEmpty(c)) { return Stream.empty(); } final int len = c.size(); final ByteIterator[] iters = new ByteIterator[len]; int i = 0; for (final ByteStream s : c) { iters[i++] = iterate(s); } return new IteratorStream<>(new ObjIteratorEx() { @Override public boolean hasNext() { for (int i = 0; i < len; i++) { if (!iters[i].hasNext()) { return false; } } return true; } @Override public R next() { final byte[] args = new byte[len]; for (int i = 0; i < len; i++) { args[i] = iters[i].nextByte(); } return zipFunction.apply(args); } }).onClose(newCloseHandler(c)); } /** * Zips two byte arrays into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one array runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. * * @param the type of the result * @param a the first byte array * @param b the second byte array * @param valueForNoneA the value to use if the first array runs out of values * @param valueForNoneB the value to use if the second array runs out of values * @param zipFunction the function to combine pairs of values from the arrays * @return a stream of combined values */ public static Stream zip(final byte[] a, final byte[] b, final byte valueForNoneA, final byte valueForNoneB, final ByteBiFunction zipFunction) { return zip(ByteIteratorEx.of(a), ByteIteratorEx.of(b), valueForNoneA, valueForNoneB, zipFunction); } /** * Zips three byte arrays into a single stream until all of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * If one array runs out of values before the other, the specified valueForNoneA, valueForNoneB or valueForNoneC is used. * * @param the type of the result * @param a the first byte array * @param b the second byte array * @param c the third byte array * @param valueForNoneA the value to use if the first array runs out of values * @param valueForNoneB the value to use if the second array runs out of values * @param valueForNoneC the value to use if the third array runs out of values * @param zipFunction the function to combine triples of values from the arrays */ public static Stream zip(final byte[] a, final byte[] b, final byte[] c, final byte valueForNoneA, final byte valueForNoneB, final byte valueForNoneC, final ByteTriFunction zipFunction) { return zip(ByteIteratorEx.of(a), ByteIteratorEx.of(b), ByteIteratorEx.of(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction); } /** * Zips two byte iterators into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. 
* * @param the type of the result * @param a the first byte iterator * @param b the second byte iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param zipFunction the function to combine pairs of values from the iterators * @return a stream of combined values */ public static Stream zip(final ByteIterator a, final ByteIterator b, final byte valueForNoneA, final byte valueForNoneB, final ByteBiFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final ByteIterator iterA = a == null ? ByteIterator.empty() : a; private final ByteIterator iterB = b == null ? ByteIterator.empty() : b; @Override public boolean hasNext() { return iterA.hasNext() || iterB.hasNext(); } @Override public R next() { if (iterA.hasNext()) { return zipFunction.apply(iterA.nextByte(), iterB.hasNext() ? iterB.nextByte() : valueForNoneB); } else { return zipFunction.apply(valueForNoneA, iterB.nextByte()); } } }); } /** * Zips three byte iterators into a single stream until all of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the other, the specified valueForNoneA, valueForNoneB or valueForNoneC is used. * * @param the type of the result * @param a the first byte iterator * @param b the second byte iterator * @param c the third byte iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param valueForNoneC the value to use if the third iterator runs out of values * @param zipFunction the function to combine triples of values from the iterators * @return a stream of combined values */ public static Stream zip(final ByteIterator a, final ByteIterator b, final ByteIterator c, final byte valueForNoneA, final byte valueForNoneB, final byte valueForNoneC, final ByteTriFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final ByteIterator iterA = a == null ? ByteIterator.empty() : a; private final ByteIterator iterB = b == null ? ByteIterator.empty() : b; private final ByteIterator iterC = c == null ? ByteIterator.empty() : c; @Override public boolean hasNext() { return iterA.hasNext() || iterB.hasNext() || iterC.hasNext(); } @Override public R next() { if (iterA.hasNext()) { return zipFunction.apply(iterA.nextByte(), iterB.hasNext() ? iterB.nextByte() : valueForNoneB, iterC.hasNext() ? iterC.nextByte() : valueForNoneC); } else if (iterB.hasNext()) { return zipFunction.apply(valueForNoneA, iterB.nextByte(), iterC.hasNext() ? iterC.nextByte() : valueForNoneC); } else { return zipFunction.apply(valueForNoneA, valueForNoneB, iterC.nextByte()); } } }); } /** * Zips two byte streams into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. 
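     *
     * <p>A minimal usage sketch (illustrative only; the values are made up). Once a stream is
     * exhausted, its default value is substituted until both streams are drained:</p>
     * <pre>{@code
     * ByteStream a = ByteStream.of((byte) 1, (byte) 2, (byte) 3);
     * ByteStream b = ByteStream.of((byte) 10);
     * // Produces 11, 2 and 3; valueForNoneB (0) is used after b runs out.
     * Stream<Integer> sums = Stream.zip(a, b, (byte) 0, (byte) 0, (x, y) -> x + y);
     * }</pre>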
* * @param the type of the result * @param a the first byte stream * @param b the second byte stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param zipFunction the function to combine pairs of values from the streams * @return a stream of combined values */ public static Stream zip(final ByteStream a, final ByteStream b, final byte valueForNoneA, final byte valueForNoneB, final ByteBiFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), valueForNoneA, valueForNoneB, zipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three byte streams into a single stream until all of them run out of values. * Each set of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the others, the specified valueForNoneA, valueForNoneB, or valueForNoneC is used. * * @param the type of the result * @param a the first byte stream * @param b the second byte stream * @param c the third byte stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param valueForNoneC the value to use if the third stream runs out of values * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final ByteStream a, final ByteStream b, final ByteStream c, final byte valueForNoneA, final byte valueForNoneB, final byte valueForNoneC, final ByteTriFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), iterate(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction)) .onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips a collection of byte streams into a single stream until all of them run out of values. * Each set of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the others, the specified valuesForNone are used. 
* * @param the type of the result * @param c the collection of byte streams * @param valuesForNone the values to use if the streams run out of values * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final Collection c, final byte[] valuesForNone, final ByteNFunction zipFunction) { if (N.isEmpty(c)) { return Stream.empty(); } final int len = c.size(); if (len != valuesForNone.length) { throw new IllegalArgumentException("The size of 'valuesForNone' must be same as the size of the collection of iterator"); } final ByteStream[] ss = c.toArray(new ByteStream[len]); final ByteIterator[] iters = new ByteIterator[len]; for (int i = 0; i < len; i++) { iters[i] = iterate(ss[i]); } return new IteratorStream<>(new ObjIteratorEx() { @Override public boolean hasNext() { for (int i = 0; i < len; i++) { if (iters[i] != null) { if (iters[i].hasNext()) { return true; } else if (iters[i] != null) { iters[i] = null; ss[i].close(); } } } return false; } @Override public R next() { final byte[] args = new byte[len]; boolean hasNext = false; for (int i = 0; i < len; i++) { if (iters[i] != null && iters[i].hasNext()) { hasNext = true; args[i] = iters[i].nextByte(); } else { args[i] = valuesForNone[i]; } } if (!hasNext) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return zipFunction.apply(args); } }).onClose(newCloseHandler(c)); } /** * Zips two short arrays into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first short array * @param b the second short array * @param zipFunction the function to combine pairs of values from the arrays * @return a stream of combined values */ public static Stream zip(final short[] a, final short[] b, final ShortBiFunction zipFunction) { return zip(ShortIteratorEx.of(a), ShortIteratorEx.of(b), zipFunction); } /** * Zips three short arrays into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first short array * @param b the second short array * @param c the third short array * @param zipFunction the function to combine triples of values from the arrays * @return a stream of combined values */ public static Stream zip(final short[] a, final short[] b, final short[] c, final ShortTriFunction zipFunction) { return zip(ShortIteratorEx.of(a), ShortIteratorEx.of(b), ShortIteratorEx.of(c), zipFunction); } /** * Zips two short iterators into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first short iterator * @param b the second short iterator * @param zipFunction the function to combine pairs of values from the iterators * @return a stream of combined values */ public static Stream zip(final ShortIterator a, final ShortIterator b, final ShortBiFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final ShortIterator iterA = a == null ? ShortIterator.empty() : a; private final ShortIterator iterB = b == null ? 
ShortIterator.empty() : b; @Override public boolean hasNext() { return iterA.hasNext() && iterB.hasNext(); } @Override public R next() { return zipFunction.apply(iterA.nextShort(), iterB.nextShort()); } }); } /** * Zips three short iterators into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first short iterator * @param b the second short iterator * @param c the third short iterator * @param zipFunction the function to combine triples of values from the iterators * @return a stream of combined values */ public static Stream zip(final ShortIterator a, final ShortIterator b, final ShortIterator c, final ShortTriFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final ShortIterator iterA = a == null ? ShortIterator.empty() : a; private final ShortIterator iterB = b == null ? ShortIterator.empty() : b; private final ShortIterator iterC = c == null ? ShortIterator.empty() : c; @Override public boolean hasNext() { return iterA.hasNext() && iterB.hasNext() && iterC.hasNext(); } @Override public R next() { return zipFunction.apply(iterA.nextShort(), iterB.nextShort(), iterC.nextShort()); } }); } /** * Zips two short streams into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first short stream * @param b the second short stream * @param zipFunction the function to combine pairs of values from the streams * @return a stream of combined */ public static Stream zip(final ShortStream a, final ShortStream b, final ShortBiFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), zipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three short streams into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first short stream * @param b the second short stream * @param c the third short stream * @param zipFunction the function to combine triples of values from the streams * @return a stream of combined values */ public static Stream zip(final ShortStream a, final ShortStream b, final ShortStream c, final ShortTriFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), iterate(c), zipFunction)).onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips a collection of short streams into a single stream until one of them runs out of values. * Each set of values is combined into a single value using the supplied zipFunction. 
* * @param the type of the result * @param c the collection of short streams * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final Collection c, final ShortNFunction zipFunction) { if (N.isEmpty(c)) { return Stream.empty(); } final int len = c.size(); final ShortIterator[] iters = new ShortIterator[len]; int i = 0; for (final ShortStream s : c) { iters[i++] = iterate(s); } return new IteratorStream<>(new ObjIteratorEx() { @Override public boolean hasNext() { for (int i = 0; i < len; i++) { if (!iters[i].hasNext()) { return false; } } return true; } @Override public R next() { final short[] args = new short[len]; for (int i = 0; i < len; i++) { args[i] = iters[i].nextShort(); } return zipFunction.apply(args); } }).onClose(newCloseHandler(c)); } /** * Zips two short arrays into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one array runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. * * @param the type of the result * @param a the first short array * @param b the second short array * @param valueForNoneA the value to use if the first array runs out of values * @param valueForNoneB the value to use if the second array runs out of values * @param zipFunction the function to combine pairs of values from the arrays * @return a stream of combined values */ public static Stream zip(final short[] a, final short[] b, final short valueForNoneA, final short valueForNoneB, final ShortBiFunction zipFunction) { return zip(ShortIteratorEx.of(a), ShortIteratorEx.of(b), valueForNoneA, valueForNoneB, zipFunction); } /** * Zips three short arrays into a single stream until all of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * If one array runs out of values before the other, the specified valueForNoneA, valueForNoneB or valueForNoneC is used. * * @param the type of the result * @param a the first short array * @param b the second short array * @param c the third short array * @param valueForNoneA the value to use if the first array runs out of values * @param valueForNoneB the value to use if the second array runs out of values * @param valueForNoneC the value to use if the third array runs out of values * @param zipFunction the function to combine triples of values from the arrays */ public static Stream zip(final short[] a, final short[] b, final short[] c, final short valueForNoneA, final short valueForNoneB, final short valueForNoneC, final ShortTriFunction zipFunction) { return zip(ShortIteratorEx.of(a), ShortIteratorEx.of(b), ShortIteratorEx.of(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction); } /** * Zips two short iterators into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. 
* * @param the type of the result * @param a the first short iterator * @param b the second short iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param zipFunction the function to combine pairs of values from the iterators * @return a stream of combined values */ public static Stream zip(final ShortIterator a, final ShortIterator b, final short valueForNoneA, final short valueForNoneB, final ShortBiFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final ShortIterator iterA = a == null ? ShortIterator.empty() : a; private final ShortIterator iterB = b == null ? ShortIterator.empty() : b; @Override public boolean hasNext() { return iterA.hasNext() || iterB.hasNext(); } @Override public R next() { if (iterA.hasNext()) { return zipFunction.apply(iterA.nextShort(), iterB.hasNext() ? iterB.nextShort() : valueForNoneB); } else { return zipFunction.apply(valueForNoneA, iterB.nextShort()); } } }); } /** * Zips three short iterators into a single stream until all of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the other, the specified valueForNoneA, valueForNoneB or valueForNoneC is used. * * @param the type of the result * @param a the first short iterator * @param b the second short iterator * @param c the third short iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param valueForNoneC the value to use if the third iterator runs out of values * @param zipFunction the function to combine triples of values from the iterators * @return a stream of combined values */ public static Stream zip(final ShortIterator a, final ShortIterator b, final ShortIterator c, final short valueForNoneA, final short valueForNoneB, final short valueForNoneC, final ShortTriFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final ShortIterator iterA = a == null ? ShortIterator.empty() : a; private final ShortIterator iterB = b == null ? ShortIterator.empty() : b; private final ShortIterator iterC = c == null ? ShortIterator.empty() : c; @Override public boolean hasNext() { return iterA.hasNext() || iterB.hasNext() || iterC.hasNext(); } @Override public R next() { if (iterA.hasNext()) { return zipFunction.apply(iterA.nextShort(), iterB.hasNext() ? iterB.nextShort() : valueForNoneB, iterC.hasNext() ? iterC.nextShort() : valueForNoneC); } else if (iterB.hasNext()) { return zipFunction.apply(valueForNoneA, iterB.nextShort(), iterC.hasNext() ? iterC.nextShort() : valueForNoneC); } else { return zipFunction.apply(valueForNoneA, valueForNoneB, iterC.nextShort()); } } }); } /** * Zips two short streams into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. 
* * @param the type of the result * @param a the first short stream * @param b the second short stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param zipFunction the function to combine pairs of values from the streams * @return a stream of combined values */ public static Stream zip(final ShortStream a, final ShortStream b, final short valueForNoneA, final short valueForNoneB, final ShortBiFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), valueForNoneA, valueForNoneB, zipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three short streams into a single stream until all of them run out of values. * Each set of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the others, the specified valueForNoneA, valueForNoneB, or valueForNoneC is used. * * @param the type of the result * @param a the first short stream * @param b the second short stream * @param c the third short stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param valueForNoneC the value to use if the third stream runs out of values * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final ShortStream a, final ShortStream b, final ShortStream c, final short valueForNoneA, final short valueForNoneB, final short valueForNoneC, final ShortTriFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), iterate(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction)) .onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips a collection of short streams into a single stream until all of them run out of values. * Each set of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the others, the specified valuesForNone are used. 
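 *
 * <p>Usage sketch (illustrative; the stream contents and the {@code ShortStream.of} factory are assumptions for illustration):</p>
 * <pre>{@code
 * List<ShortStream> streams = Arrays.asList(
 *         ShortStream.of((short) 1, (short) 2, (short) 3),
 *         ShortStream.of((short) 10, (short) 20));
 *
 * // 'valuesForNone' must have one entry per stream; it pads whichever stream ends first.
 * Stream<String> rows = Stream.zip(streams, new short[] { -1, -1 }, args -> args[0] + ":" + args[1]);
 * // rows => ["1:10", "2:20", "3:-1"]
 * }</pre>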
* * @param the type of the result * @param c the collection of short streams * @param valuesForNone the values to use if the streams run out of values * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final Collection c, final short[] valuesForNone, final ShortNFunction zipFunction) { if (N.isEmpty(c)) { return Stream.empty(); } final int len = c.size(); if (len != valuesForNone.length) { throw new IllegalArgumentException("The size of 'valuesForNone' must be same as the size of the collection of iterator"); } final ShortStream[] ss = c.toArray(new ShortStream[len]); final ShortIterator[] iters = new ShortIterator[len]; for (int i = 0; i < len; i++) { iters[i] = iterate(ss[i]); } return new IteratorStream<>(new ObjIteratorEx() { @Override public boolean hasNext() { for (int i = 0; i < len; i++) { if (iters[i] != null) { if (iters[i].hasNext()) { return true; } else if (iters[i] != null) { iters[i] = null; ss[i].close(); } } } return false; } @Override public R next() { final short[] args = new short[len]; boolean hasNext = false; for (int i = 0; i < len; i++) { if (iters[i] != null && iters[i].hasNext()) { hasNext = true; args[i] = iters[i].nextShort(); } else { args[i] = valuesForNone[i]; } } if (!hasNext) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return zipFunction.apply(args); } }).onClose(newCloseHandler(c)); } /** * Zips two int arrays into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first int array * @param b the second int array * @param zipFunction the function to combine pairs of values from the arrays * @return a stream of combined values */ public static Stream zip(final int[] a, final int[] b, final IntBiFunction zipFunction) { return zip(IntIteratorEx.of(a), IntIteratorEx.of(b), zipFunction); } /** * Zips three int arrays into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first int array * @param b the second int array * @param c the third int array * @param zipFunction the function to combine triples of values from the arrays * @return a stream of combined values */ public static Stream zip(final int[] a, final int[] b, final int[] c, final IntTriFunction zipFunction) { return zip(IntIteratorEx.of(a), IntIteratorEx.of(b), IntIteratorEx.of(c), zipFunction); } /** * Zips two int iterators into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first int iterator * @param b the second int iterator * @param zipFunction the function to combine pairs of values from the iterators * @return a stream of combined values */ public static Stream zip(final IntIterator a, final IntIterator b, final IntBiFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final IntIterator iterA = a == null ? IntIterator.empty() : a; private final IntIterator iterB = b == null ? 
IntIterator.empty() : b; @Override public boolean hasNext() { return iterA.hasNext() && iterB.hasNext(); } @Override public R next() { return zipFunction.apply(iterA.nextInt(), iterB.nextInt()); } }); } /** * Zips three int iterators into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first int iterator * @param b the second int iterator * @param c the third int iterator * @param zipFunction the function to combine triples of values from the iterators * @return a stream of combined values */ public static Stream zip(final IntIterator a, final IntIterator b, final IntIterator c, final IntTriFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final IntIterator iterA = a == null ? IntIterator.empty() : a; private final IntIterator iterB = b == null ? IntIterator.empty() : b; private final IntIterator iterC = c == null ? IntIterator.empty() : c; @Override public boolean hasNext() { return iterA.hasNext() && iterB.hasNext() && iterC.hasNext(); } @Override public R next() { return zipFunction.apply(iterA.nextInt(), iterB.nextInt(), iterC.nextInt()); } }); } /** * Zips two int streams into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first int stream * @param b the second int stream * @param zipFunction the function to combine pairs of values from the streams * @return a stream of combined */ public static Stream zip(final IntStream a, final IntStream b, final IntBiFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), zipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three int streams into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first int stream * @param b the second int stream * @param c the third int stream * @param zipFunction the function to combine triples of values from the streams * @return a stream of combined values */ public static Stream zip(final IntStream a, final IntStream b, final IntStream c, final IntTriFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), iterate(c), zipFunction)).onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips a collection of int streams into a single stream until one of them runs out of values. * Each set of values is combined into a single value using the supplied zipFunction. 
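 *
 * <p>Usage sketch (illustrative values; {@code IntStream.of} is assumed as the stream factory):</p>
 * <pre>{@code
 * List<IntStream> streams = Arrays.asList(IntStream.of(1, 2, 3), IntStream.of(10, 20), IntStream.of(100, 200, 300));
 *
 * Stream<Integer> sums = Stream.zip(streams, args -> args[0] + args[1] + args[2]);
 * // sums => [111, 222] - iteration stops when the shortest stream is exhausted.
 * }</pre>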
* * @param the type of the result * @param c the collection of int streams * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final Collection c, final IntNFunction zipFunction) { if (N.isEmpty(c)) { return Stream.empty(); } final int len = c.size(); final IntIterator[] iters = new IntIterator[len]; int i = 0; for (final IntStream s : c) { iters[i++] = iterate(s); } return new IteratorStream<>(new ObjIteratorEx() { @Override public boolean hasNext() { for (int i = 0; i < len; i++) { if (!iters[i].hasNext()) { return false; } } return true; } @Override public R next() { final int[] args = new int[len]; for (int i = 0; i < len; i++) { args[i] = iters[i].nextInt(); } return zipFunction.apply(args); } }).onClose(newCloseHandler(c)); } /** * Zips two int arrays into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one array runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. * * @param the type of the result * @param a the first int array * @param b the second int array * @param valueForNoneA the value to use if the first array runs out of values * @param valueForNoneB the value to use if the second array runs out of values * @param zipFunction the function to combine pairs of values from the arrays * @return a stream of combined values */ public static Stream zip(final int[] a, final int[] b, final int valueForNoneA, final int valueForNoneB, final IntBiFunction zipFunction) { return zip(IntIteratorEx.of(a), IntIteratorEx.of(b), valueForNoneA, valueForNoneB, zipFunction); } /** * Zips three int arrays into a single stream until all of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * If one array runs out of values before the other, the specified valueForNoneA, valueForNoneB or valueForNoneC is used. * * @param the type of the result * @param a the first int array * @param b the second int array * @param c the third int array * @param valueForNoneA the value to use if the first array runs out of values * @param valueForNoneB the value to use if the second array runs out of values * @param valueForNoneC the value to use if the third array runs out of values * @param zipFunction the function to combine triples of values from the arrays */ public static Stream zip(final int[] a, final int[] b, final int[] c, final int valueForNoneA, final int valueForNoneB, final int valueForNoneC, final IntTriFunction zipFunction) { return zip(IntIteratorEx.of(a), IntIteratorEx.of(b), IntIteratorEx.of(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction); } /** * Zips two int iterators into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. 
* * @param the type of the result * @param a the first int iterator * @param b the second int iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param zipFunction the function to combine pairs of values from the iterators * @return a stream of combined values */ public static Stream zip(final IntIterator a, final IntIterator b, final int valueForNoneA, final int valueForNoneB, final IntBiFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final IntIterator iterA = a == null ? IntIterator.empty() : a; private final IntIterator iterB = b == null ? IntIterator.empty() : b; @Override public boolean hasNext() { return iterA.hasNext() || iterB.hasNext(); } @Override public R next() { if (iterA.hasNext()) { return zipFunction.apply(iterA.nextInt(), iterB.hasNext() ? iterB.nextInt() : valueForNoneB); } else { return zipFunction.apply(valueForNoneA, iterB.nextInt()); } } }); } /** * Zips three int iterators into a single stream until all of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the other, the specified valueForNoneA, valueForNoneB or valueForNoneC is used. * * @param the type of the result * @param a the first int iterator * @param b the second int iterator * @param c the third int iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param valueForNoneC the value to use if the third iterator runs out of values * @param zipFunction the function to combine triples of values from the iterators * @return a stream of combined values */ public static Stream zip(final IntIterator a, final IntIterator b, final IntIterator c, final int valueForNoneA, final int valueForNoneB, final int valueForNoneC, final IntTriFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final IntIterator iterA = a == null ? IntIterator.empty() : a; private final IntIterator iterB = b == null ? IntIterator.empty() : b; private final IntIterator iterC = c == null ? IntIterator.empty() : c; @Override public boolean hasNext() { return iterA.hasNext() || iterB.hasNext() || iterC.hasNext(); } @Override public R next() { if (iterA.hasNext()) { return zipFunction.apply(iterA.nextInt(), iterB.hasNext() ? iterB.nextInt() : valueForNoneB, iterC.hasNext() ? iterC.nextInt() : valueForNoneC); } else if (iterB.hasNext()) { return zipFunction.apply(valueForNoneA, iterB.nextInt(), iterC.hasNext() ? iterC.nextInt() : valueForNoneC); } else { return zipFunction.apply(valueForNoneA, valueForNoneB, iterC.nextInt()); } } }); } /** * Zips two int streams into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. 
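 *
 * <p>Usage sketch (illustrative values only; note that both input streams are closed when the returned stream is closed):</p>
 * <pre>{@code
 * // The shorter stream is padded with valueForNoneB = -1 after it is exhausted.
 * Stream<int[]> pairs = Stream.zip(IntStream.of(1, 2, 3), IntStream.of(10), 0, -1, (x, y) -> new int[] { x, y });
 * // pairs => [[1, 10], [2, -1], [3, -1]]
 * }</pre>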
* * @param the type of the result * @param a the first int stream * @param b the second int stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param zipFunction the function to combine pairs of values from the streams * @return a stream of combined values */ public static Stream zip(final IntStream a, final IntStream b, final int valueForNoneA, final int valueForNoneB, final IntBiFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), valueForNoneA, valueForNoneB, zipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three int streams into a single stream until all of them run out of values. * Each set of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the others, the specified valueForNoneA, valueForNoneB, or valueForNoneC is used. * * @param the type of the result * @param a the first int stream * @param b the second int stream * @param c the third int stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param valueForNoneC the value to use if the third stream runs out of values * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final IntStream a, final IntStream b, final IntStream c, final int valueForNoneA, final int valueForNoneB, final int valueForNoneC, final IntTriFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), iterate(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction)) .onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips a collection of int streams into a single stream until all of them run out of values. * Each set of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the others, the specified valuesForNone are used. 
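 *
 * <p>Usage sketch (illustrative values only):</p>
 * <pre>{@code
 * List<IntStream> streams = Arrays.asList(IntStream.of(1, 2), IntStream.of(10, 20, 30));
 *
 * // The shorter first stream is padded with valuesForNone[0] = 0 after it is exhausted.
 * Stream<Integer> sums = Stream.zip(streams, new int[] { 0, 0 }, args -> args[0] + args[1]);
 * // sums => [11, 22, 30]
 * }</pre>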
* * @param the type of the result * @param c the collection of int streams * @param valuesForNone the values to use if the streams run out of values * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final Collection c, final int[] valuesForNone, final IntNFunction zipFunction) { if (N.isEmpty(c)) { return Stream.empty(); } final int len = c.size(); if (len != valuesForNone.length) { throw new IllegalArgumentException("The size of 'valuesForNone' must be same as the size of the collection of iterator"); } final IntStream[] ss = c.toArray(new IntStream[len]); final IntIterator[] iters = new IntIterator[len]; for (int i = 0; i < len; i++) { iters[i] = iterate(ss[i]); } return new IteratorStream<>(new ObjIteratorEx() { @Override public boolean hasNext() { for (int i = 0; i < len; i++) { if (iters[i] != null) { if (iters[i].hasNext()) { return true; } else if (iters[i] != null) { iters[i] = null; ss[i].close(); } } } return false; } @Override public R next() { final int[] args = new int[len]; boolean hasNext = false; for (int i = 0; i < len; i++) { if (iters[i] != null && iters[i].hasNext()) { hasNext = true; args[i] = iters[i].nextInt(); } else { args[i] = valuesForNone[i]; } } if (!hasNext) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return zipFunction.apply(args); } }).onClose(newCloseHandler(c)); } /** * Zips two long arrays into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first long array * @param b the second long array * @param zipFunction the function to combine pairs of values from the arrays * @return a stream of combined values */ public static Stream zip(final long[] a, final long[] b, final LongBiFunction zipFunction) { return zip(LongIteratorEx.of(a), LongIteratorEx.of(b), zipFunction); } /** * Zips three long arrays into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first long array * @param b the second long array * @param c the third long array * @param zipFunction the function to combine triples of values from the arrays * @return a stream of combined values */ public static Stream zip(final long[] a, final long[] b, final long[] c, final LongTriFunction zipFunction) { return zip(LongIteratorEx.of(a), LongIteratorEx.of(b), LongIteratorEx.of(c), zipFunction); } /** * Zips two long iterators into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first long iterator * @param b the second long iterator * @param zipFunction the function to combine pairs of values from the iterators * @return a stream of combined values */ public static Stream zip(final LongIterator a, final LongIterator b, final LongBiFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final LongIterator iterA = a == null ? LongIterator.empty() : a; private final LongIterator iterB = b == null ? 
LongIterator.empty() : b; @Override public boolean hasNext() { return iterA.hasNext() && iterB.hasNext(); } @Override public R next() { return zipFunction.apply(iterA.nextLong(), iterB.nextLong()); } }); } /** * Zips three long iterators into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first long iterator * @param b the second long iterator * @param c the third long iterator * @param zipFunction the function to combine triples of values from the iterators * @return a stream of combined values */ public static Stream zip(final LongIterator a, final LongIterator b, final LongIterator c, final LongTriFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final LongIterator iterA = a == null ? LongIterator.empty() : a; private final LongIterator iterB = b == null ? LongIterator.empty() : b; private final LongIterator iterC = c == null ? LongIterator.empty() : c; @Override public boolean hasNext() { return iterA.hasNext() && iterB.hasNext() && iterC.hasNext(); } @Override public R next() { return zipFunction.apply(iterA.nextLong(), iterB.nextLong(), iterC.nextLong()); } }); } /** * Zips two long streams into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first long stream * @param b the second long stream * @param zipFunction the function to combine pairs of values from the streams * @return a stream of combined */ public static Stream zip(final LongStream a, final LongStream b, final LongBiFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), zipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three long streams into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first long stream * @param b the second long stream * @param c the third long stream * @param zipFunction the function to combine triples of values from the streams * @return a stream of combined values */ public static Stream zip(final LongStream a, final LongStream b, final LongStream c, final LongTriFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), iterate(c), zipFunction)).onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips a collection of long streams into a single stream until one of them runs out of values. * Each set of values is combined into a single value using the supplied zipFunction. 
* * @param the type of the result * @param c the collection of long streams * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final Collection c, final LongNFunction zipFunction) { if (N.isEmpty(c)) { return Stream.empty(); } final int len = c.size(); final LongIterator[] iters = new LongIterator[len]; int i = 0; for (final LongStream s : c) { iters[i++] = iterate(s); } return new IteratorStream<>(new ObjIteratorEx() { @Override public boolean hasNext() { for (int i = 0; i < len; i++) { if (!iters[i].hasNext()) { return false; } } return true; } @Override public R next() { final long[] args = new long[len]; for (int i = 0; i < len; i++) { args[i] = iters[i].nextLong(); } return zipFunction.apply(args); } }).onClose(newCloseHandler(c)); } /** * Zips two long arrays into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one array runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. * * @param the type of the result * @param a the first long array * @param b the second long array * @param valueForNoneA the value to use if the first array runs out of values * @param valueForNoneB the value to use if the second array runs out of values * @param zipFunction the function to combine pairs of values from the arrays * @return a stream of combined values */ public static Stream zip(final long[] a, final long[] b, final long valueForNoneA, final long valueForNoneB, final LongBiFunction zipFunction) { return zip(LongIteratorEx.of(a), LongIteratorEx.of(b), valueForNoneA, valueForNoneB, zipFunction); } /** * Zips three long arrays into a single stream until all of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * If one array runs out of values before the other, the specified valueForNoneA, valueForNoneB or valueForNoneC is used. * * @param the type of the result * @param a the first long array * @param b the second long array * @param c the third long array * @param valueForNoneA the value to use if the first array runs out of values * @param valueForNoneB the value to use if the second array runs out of values * @param valueForNoneC the value to use if the third array runs out of values * @param zipFunction the function to combine triples of values from the arrays */ public static Stream zip(final long[] a, final long[] b, final long[] c, final long valueForNoneA, final long valueForNoneB, final long valueForNoneC, final LongTriFunction zipFunction) { return zip(LongIteratorEx.of(a), LongIteratorEx.of(b), LongIteratorEx.of(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction); } /** * Zips two long iterators into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. 
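 *
 * <p>Usage sketch (illustrative values; {@code LongIterator.of} is assumed here as a convenience factory):</p>
 * <pre>{@code
 * LongIterator a = LongIterator.of(1L, 2L, 3L);  // illustrative inputs
 * LongIterator b = LongIterator.of(100L);
 *
 * // 'b' is padded with valueForNoneB = 0L once it runs out.
 * Stream<Long> sums = Stream.zip(a, b, 0L, 0L, (x, y) -> x + y);
 * // sums => [101, 2, 3]
 * }</pre>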
* * @param the type of the result * @param a the first long iterator * @param b the second long iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param zipFunction the function to combine pairs of values from the iterators * @return a stream of combined values */ public static Stream zip(final LongIterator a, final LongIterator b, final long valueForNoneA, final long valueForNoneB, final LongBiFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final LongIterator iterA = a == null ? LongIterator.empty() : a; private final LongIterator iterB = b == null ? LongIterator.empty() : b; @Override public boolean hasNext() { return iterA.hasNext() || iterB.hasNext(); } @Override public R next() { if (iterA.hasNext()) { return zipFunction.apply(iterA.nextLong(), iterB.hasNext() ? iterB.nextLong() : valueForNoneB); } else { return zipFunction.apply(valueForNoneA, iterB.nextLong()); } } }); } /** * Zips three long iterators into a single stream until all of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the other, the specified valueForNoneA, valueForNoneB or valueForNoneC is used. * * @param the type of the result * @param a the first long iterator * @param b the second long iterator * @param c the third long iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param valueForNoneC the value to use if the third iterator runs out of values * @param zipFunction the function to combine triples of values from the iterators * @return a stream of combined values */ public static Stream zip(final LongIterator a, final LongIterator b, final LongIterator c, final long valueForNoneA, final long valueForNoneB, final long valueForNoneC, final LongTriFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final LongIterator iterA = a == null ? LongIterator.empty() : a; private final LongIterator iterB = b == null ? LongIterator.empty() : b; private final LongIterator iterC = c == null ? LongIterator.empty() : c; @Override public boolean hasNext() { return iterA.hasNext() || iterB.hasNext() || iterC.hasNext(); } @Override public R next() { if (iterA.hasNext()) { return zipFunction.apply(iterA.nextLong(), iterB.hasNext() ? iterB.nextLong() : valueForNoneB, iterC.hasNext() ? iterC.nextLong() : valueForNoneC); } else if (iterB.hasNext()) { return zipFunction.apply(valueForNoneA, iterB.nextLong(), iterC.hasNext() ? iterC.nextLong() : valueForNoneC); } else { return zipFunction.apply(valueForNoneA, valueForNoneB, iterC.nextLong()); } } }); } /** * Zips two long streams into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. 
* * @param the type of the result * @param a the first long stream * @param b the second long stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param zipFunction the function to combine pairs of values from the streams * @return a stream of combined values */ public static Stream zip(final LongStream a, final LongStream b, final long valueForNoneA, final long valueForNoneB, final LongBiFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), valueForNoneA, valueForNoneB, zipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three long streams into a single stream until all of them run out of values. * Each set of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the others, the specified valueForNoneA, valueForNoneB, or valueForNoneC is used. * * @param the type of the result * @param a the first long stream * @param b the second long stream * @param c the third long stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param valueForNoneC the value to use if the third stream runs out of values * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final LongStream a, final LongStream b, final LongStream c, final long valueForNoneA, final long valueForNoneB, final long valueForNoneC, final LongTriFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), iterate(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction)) .onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips a collection of long streams into a single stream until all of them run out of values. * Each set of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the others, the specified valuesForNone are used. 
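 *
 * <p>Usage sketch (illustrative values only):</p>
 * <pre>{@code
 * List<LongStream> streams = Arrays.asList(LongStream.of(1L, 2L, 3L), LongStream.of(10L));
 *
 * Stream<Long> sums = Stream.zip(streams, new long[] { 0L, 0L }, args -> args[0] + args[1]);
 * // sums => [11, 2, 3] - the exhausted second stream contributes valuesForNone[1] = 0L.
 * }</pre>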
* * @param the type of the result * @param c the collection of long streams * @param valuesForNone the values to use if the streams run out of values * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final Collection c, final long[] valuesForNone, final LongNFunction zipFunction) { if (N.isEmpty(c)) { return Stream.empty(); } final int len = c.size(); if (len != valuesForNone.length) { throw new IllegalArgumentException("The size of 'valuesForNone' must be same as the size of the collection of iterator"); } final LongStream[] ss = c.toArray(new LongStream[len]); final LongIterator[] iters = new LongIterator[len]; for (int i = 0; i < len; i++) { iters[i] = iterate(ss[i]); } return new IteratorStream<>(new ObjIteratorEx() { @Override public boolean hasNext() { for (int i = 0; i < len; i++) { if (iters[i] != null) { if (iters[i].hasNext()) { return true; } else if (iters[i] != null) { iters[i] = null; ss[i].close(); } } } return false; } @Override public R next() { final long[] args = new long[len]; boolean hasNext = false; for (int i = 0; i < len; i++) { if (iters[i] != null && iters[i].hasNext()) { hasNext = true; args[i] = iters[i].nextLong(); } else { args[i] = valuesForNone[i]; } } if (!hasNext) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return zipFunction.apply(args); } }).onClose(newCloseHandler(c)); } /** * Zips two float arrays into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first float array * @param b the second float array * @param zipFunction the function to combine pairs of values from the arrays * @return a stream of combined values */ public static Stream zip(final float[] a, final float[] b, final FloatBiFunction zipFunction) { return zip(FloatIteratorEx.of(a), FloatIteratorEx.of(b), zipFunction); } /** * Zips three float arrays into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first float array * @param b the second float array * @param c the third float array * @param zipFunction the function to combine triples of values from the arrays * @return a stream of combined values */ public static Stream zip(final float[] a, final float[] b, final float[] c, final FloatTriFunction zipFunction) { return zip(FloatIteratorEx.of(a), FloatIteratorEx.of(b), FloatIteratorEx.of(c), zipFunction); } /** * Zips two float iterators into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first float iterator * @param b the second float iterator * @param zipFunction the function to combine pairs of values from the iterators * @return a stream of combined values */ public static Stream zip(final FloatIterator a, final FloatIterator b, final FloatBiFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final FloatIterator iterA = a == null ? FloatIterator.empty() : a; private final FloatIterator iterB = b == null ? 
FloatIterator.empty() : b; @Override public boolean hasNext() { return iterA.hasNext() && iterB.hasNext(); } @Override public R next() { return zipFunction.apply(iterA.nextFloat(), iterB.nextFloat()); } }); } /** * Zips three float iterators into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first float iterator * @param b the second float iterator * @param c the third float iterator * @param zipFunction the function to combine triples of values from the iterators * @return a stream of combined values */ public static Stream zip(final FloatIterator a, final FloatIterator b, final FloatIterator c, final FloatTriFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final FloatIterator iterA = a == null ? FloatIterator.empty() : a; private final FloatIterator iterB = b == null ? FloatIterator.empty() : b; private final FloatIterator iterC = c == null ? FloatIterator.empty() : c; @Override public boolean hasNext() { return iterA.hasNext() && iterB.hasNext() && iterC.hasNext(); } @Override public R next() { return zipFunction.apply(iterA.nextFloat(), iterB.nextFloat(), iterC.nextFloat()); } }); } /** * Zips two float streams into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first float stream * @param b the second float stream * @param zipFunction the function to combine pairs of values from the streams * @return a stream of combined */ public static Stream zip(final FloatStream a, final FloatStream b, final FloatBiFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), zipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three float streams into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first float stream * @param b the second float stream * @param c the third float stream * @param zipFunction the function to combine triples of values from the streams * @return a stream of combined values */ public static Stream zip(final FloatStream a, final FloatStream b, final FloatStream c, final FloatTriFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), iterate(c), zipFunction)).onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips a collection of float streams into a single stream until one of them runs out of values. * Each set of values is combined into a single value using the supplied zipFunction. 
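 *
 * <p>Usage sketch (illustrative values; {@code FloatStream.of} is assumed as the stream factory):</p>
 * <pre>{@code
 * List<FloatStream> streams = Arrays.asList(FloatStream.of(1.0f, 2.0f), FloatStream.of(0.5f, 0.25f));
 *
 * Stream<Float> products = Stream.zip(streams, args -> args[0] * args[1]);
 * // products => [0.5, 0.5]
 * }</pre>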
* * @param the type of the result * @param c the collection of float streams * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final Collection c, final FloatNFunction zipFunction) { if (N.isEmpty(c)) { return Stream.empty(); } final int len = c.size(); final FloatIterator[] iters = new FloatIterator[len]; int i = 0; for (final FloatStream s : c) { iters[i++] = iterate(s); } return new IteratorStream<>(new ObjIteratorEx() { @Override public boolean hasNext() { for (int i = 0; i < len; i++) { if (!iters[i].hasNext()) { return false; } } return true; } @Override public R next() { final float[] args = new float[len]; for (int i = 0; i < len; i++) { args[i] = iters[i].nextFloat(); } return zipFunction.apply(args); } }).onClose(newCloseHandler(c)); } /** * Zips two float arrays into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one array runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. * * @param the type of the result * @param a the first float array * @param b the second float array * @param valueForNoneA the value to use if the first array runs out of values * @param valueForNoneB the value to use if the second array runs out of values * @param zipFunction the function to combine pairs of values from the arrays * @return a stream of combined values */ public static Stream zip(final float[] a, final float[] b, final float valueForNoneA, final float valueForNoneB, final FloatBiFunction zipFunction) { return zip(FloatIteratorEx.of(a), FloatIteratorEx.of(b), valueForNoneA, valueForNoneB, zipFunction); } /** * Zips three float arrays into a single stream until all of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * If one array runs out of values before the other, the specified valueForNoneA, valueForNoneB or valueForNoneC is used. * * @param the type of the result * @param a the first float array * @param b the second float array * @param c the third float array * @param valueForNoneA the value to use if the first array runs out of values * @param valueForNoneB the value to use if the second array runs out of values * @param valueForNoneC the value to use if the third array runs out of values * @param zipFunction the function to combine triples of values from the arrays */ public static Stream zip(final float[] a, final float[] b, final float[] c, final float valueForNoneA, final float valueForNoneB, final float valueForNoneC, final FloatTriFunction zipFunction) { return zip(FloatIteratorEx.of(a), FloatIteratorEx.of(b), FloatIteratorEx.of(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction); } /** * Zips two float iterators into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. 
* * @param the type of the result * @param a the first float iterator * @param b the second float iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param zipFunction the function to combine pairs of values from the iterators * @return a stream of combined values */ public static Stream zip(final FloatIterator a, final FloatIterator b, final float valueForNoneA, final float valueForNoneB, final FloatBiFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final FloatIterator iterA = a == null ? FloatIterator.empty() : a; private final FloatIterator iterB = b == null ? FloatIterator.empty() : b; @Override public boolean hasNext() { return iterA.hasNext() || iterB.hasNext(); } @Override public R next() { if (iterA.hasNext()) { return zipFunction.apply(iterA.nextFloat(), iterB.hasNext() ? iterB.nextFloat() : valueForNoneB); } else { return zipFunction.apply(valueForNoneA, iterB.nextFloat()); } } }); } /** * Zips three float iterators into a single stream until all of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the other, the specified valueForNoneA, valueForNoneB or valueForNoneC is used. * * @param the type of the result * @param a the first float iterator * @param b the second float iterator * @param c the third float iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param valueForNoneC the value to use if the third iterator runs out of values * @param zipFunction the function to combine triples of values from the iterators * @return a stream of combined values */ public static Stream zip(final FloatIterator a, final FloatIterator b, final FloatIterator c, final float valueForNoneA, final float valueForNoneB, final float valueForNoneC, final FloatTriFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final FloatIterator iterA = a == null ? FloatIterator.empty() : a; private final FloatIterator iterB = b == null ? FloatIterator.empty() : b; private final FloatIterator iterC = c == null ? FloatIterator.empty() : c; @Override public boolean hasNext() { return iterA.hasNext() || iterB.hasNext() || iterC.hasNext(); } @Override public R next() { if (iterA.hasNext()) { return zipFunction.apply(iterA.nextFloat(), iterB.hasNext() ? iterB.nextFloat() : valueForNoneB, iterC.hasNext() ? iterC.nextFloat() : valueForNoneC); } else if (iterB.hasNext()) { return zipFunction.apply(valueForNoneA, iterB.nextFloat(), iterC.hasNext() ? iterC.nextFloat() : valueForNoneC); } else { return zipFunction.apply(valueForNoneA, valueForNoneB, iterC.nextFloat()); } } }); } /** * Zips two float streams into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. 
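 *
 * <p>Usage sketch (illustrative values only):</p>
 * <pre>{@code
 * // The shorter second stream is padded with valueForNoneB = 0.0f.
 * Stream<Float> sums = Stream.zip(FloatStream.of(1.0f, 2.0f, 3.0f), FloatStream.of(0.5f), 0.0f, 0.0f, (x, y) -> x + y);
 * // sums => [1.5, 2.0, 3.0]
 * }</pre>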
* * @param the type of the result * @param a the first float stream * @param b the second float stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param zipFunction the function to combine pairs of values from the streams * @return a stream of combined values */ public static Stream zip(final FloatStream a, final FloatStream b, final float valueForNoneA, final float valueForNoneB, final FloatBiFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), valueForNoneA, valueForNoneB, zipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three float streams into a single stream until all of them run out of values. * Each set of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the others, the specified valueForNoneA, valueForNoneB, or valueForNoneC is used. * * @param the type of the result * @param a the first float stream * @param b the second float stream * @param c the third float stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param valueForNoneC the value to use if the third stream runs out of values * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final FloatStream a, final FloatStream b, final FloatStream c, final float valueForNoneA, final float valueForNoneB, final float valueForNoneC, final FloatTriFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), iterate(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction)) .onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips a collection of float streams into a single stream until all of them run out of values. * Each set of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the others, the specified valuesForNone are used. 
* * @param the type of the result * @param c the collection of float streams * @param valuesForNone the values to use if the streams run out of values * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final Collection c, final float[] valuesForNone, final FloatNFunction zipFunction) { if (N.isEmpty(c)) { return Stream.empty(); } final int len = c.size(); if (len != valuesForNone.length) { throw new IllegalArgumentException("The size of 'valuesForNone' must be same as the size of the collection of iterator"); } final FloatStream[] ss = c.toArray(new FloatStream[len]); final FloatIterator[] iters = new FloatIterator[len]; for (int i = 0; i < len; i++) { iters[i] = iterate(ss[i]); } return new IteratorStream<>(new ObjIteratorEx() { @Override public boolean hasNext() { for (int i = 0; i < len; i++) { if (iters[i] != null) { if (iters[i].hasNext()) { return true; } else if (iters[i] != null) { iters[i] = null; ss[i].close(); } } } return false; } @Override public R next() { final float[] args = new float[len]; boolean hasNext = false; for (int i = 0; i < len; i++) { if (iters[i] != null && iters[i].hasNext()) { hasNext = true; args[i] = iters[i].nextFloat(); } else { args[i] = valuesForNone[i]; } } if (!hasNext) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return zipFunction.apply(args); } }).onClose(newCloseHandler(c)); } /** * Zips two double arrays into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first double array * @param b the second double array * @param zipFunction the function to combine pairs of values from the arrays * @return a stream of combined values */ public static Stream zip(final double[] a, final double[] b, final DoubleBiFunction zipFunction) { return zip(DoubleIteratorEx.of(a), DoubleIteratorEx.of(b), zipFunction); } /** * Zips three double arrays into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first double array * @param b the second double array * @param c the third double array * @param zipFunction the function to combine triples of values from the arrays * @return a stream of combined values */ public static Stream zip(final double[] a, final double[] b, final double[] c, final DoubleTriFunction zipFunction) { return zip(DoubleIteratorEx.of(a), DoubleIteratorEx.of(b), DoubleIteratorEx.of(c), zipFunction); } /** * Zips two double iterators into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first double iterator * @param b the second double iterator * @param zipFunction the function to combine pairs of values from the iterators * @return a stream of combined values */ public static Stream zip(final DoubleIterator a, final DoubleIterator b, final DoubleBiFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final DoubleIterator iterA = a == null ? DoubleIterator.empty() : a; private final DoubleIterator iterB = b == null ? 
DoubleIterator.empty() : b; @Override public boolean hasNext() { return iterA.hasNext() && iterB.hasNext(); } @Override public R next() { return zipFunction.apply(iterA.nextDouble(), iterB.nextDouble()); } }); } /** * Zips three double iterators into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first double iterator * @param b the second double iterator * @param c the third double iterator * @param zipFunction the function to combine triples of values from the iterators * @return a stream of combined values */ public static Stream zip(final DoubleIterator a, final DoubleIterator b, final DoubleIterator c, final DoubleTriFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final DoubleIterator iterA = a == null ? DoubleIterator.empty() : a; private final DoubleIterator iterB = b == null ? DoubleIterator.empty() : b; private final DoubleIterator iterC = c == null ? DoubleIterator.empty() : c; @Override public boolean hasNext() { return iterA.hasNext() && iterB.hasNext() && iterC.hasNext(); } @Override public R next() { return zipFunction.apply(iterA.nextDouble(), iterB.nextDouble(), iterC.nextDouble()); } }); } /** * Zips two double streams into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first double stream * @param b the second double stream * @param zipFunction the function to combine pairs of values from the streams * @return a stream of combined */ public static Stream zip(final DoubleStream a, final DoubleStream b, final DoubleBiFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), zipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three double streams into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the result * @param a the first double stream * @param b the second double stream * @param c the third double stream * @param zipFunction the function to combine triples of values from the streams * @return a stream of combined values */ public static Stream zip(final DoubleStream a, final DoubleStream b, final DoubleStream c, final DoubleTriFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), iterate(c), zipFunction)).onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips a collection of double streams into a single stream until one of them runs out of values. * Each set of values is combined into a single value using the supplied zipFunction. 
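 *
 * <p>Usage sketch (illustrative values only):</p>
 * <pre>{@code
 * List<DoubleStream> streams = Arrays.asList(DoubleStream.of(1.0, 2.0, 3.0), DoubleStream.of(4.0, 6.0));
 *
 * Stream<Double> averages = Stream.zip(streams, args -> (args[0] + args[1]) / 2);
 * // averages => [2.5, 4.0] - stops when the shorter stream is exhausted.
 * }</pre>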
* * @param the type of the result * @param c the collection of double streams * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final Collection c, final DoubleNFunction zipFunction) { if (N.isEmpty(c)) { return Stream.empty(); } final int len = c.size(); final DoubleIterator[] iters = new DoubleIterator[len]; int i = 0; for (final DoubleStream s : c) { iters[i++] = iterate(s); } return new IteratorStream<>(new ObjIteratorEx() { @Override public boolean hasNext() { for (int i = 0; i < len; i++) { if (!iters[i].hasNext()) { return false; } } return true; } @Override public R next() { final double[] args = new double[len]; for (int i = 0; i < len; i++) { args[i] = iters[i].nextDouble(); } return zipFunction.apply(args); } }).onClose(newCloseHandler(c)); } /** * Zips two double arrays into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one array runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. * * @param the type of the result * @param a the first double array * @param b the second double array * @param valueForNoneA the value to use if the first array runs out of values * @param valueForNoneB the value to use if the second array runs out of values * @param zipFunction the function to combine pairs of values from the arrays * @return a stream of combined values */ public static Stream zip(final double[] a, final double[] b, final double valueForNoneA, final double valueForNoneB, final DoubleBiFunction zipFunction) { return zip(DoubleIteratorEx.of(a), DoubleIteratorEx.of(b), valueForNoneA, valueForNoneB, zipFunction); } /** * Zips three double arrays into a single stream until all of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * If one array runs out of values before the other, the specified valueForNoneA, valueForNoneB or valueForNoneC is used. * * @param the type of the result * @param a the first double array * @param b the second double array * @param c the third double array * @param valueForNoneA the value to use if the first array runs out of values * @param valueForNoneB the value to use if the second array runs out of values * @param valueForNoneC the value to use if the third array runs out of values * @param zipFunction the function to combine triples of values from the arrays */ public static Stream zip(final double[] a, final double[] b, final double[] c, final double valueForNoneA, final double valueForNoneB, final double valueForNoneC, final DoubleTriFunction zipFunction) { return zip(DoubleIteratorEx.of(a), DoubleIteratorEx.of(b), DoubleIteratorEx.of(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction); } /** * Zips two double iterators into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. 
* * @param the type of the result * @param a the first double iterator * @param b the second double iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param zipFunction the function to combine pairs of values from the iterators * @return a stream of combined values */ public static Stream zip(final DoubleIterator a, final DoubleIterator b, final double valueForNoneA, final double valueForNoneB, final DoubleBiFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final DoubleIterator iterA = a == null ? DoubleIterator.empty() : a; private final DoubleIterator iterB = b == null ? DoubleIterator.empty() : b; @Override public boolean hasNext() { return iterA.hasNext() || iterB.hasNext(); } @Override public R next() { if (iterA.hasNext()) { return zipFunction.apply(iterA.nextDouble(), iterB.hasNext() ? iterB.nextDouble() : valueForNoneB); } else { return zipFunction.apply(valueForNoneA, iterB.nextDouble()); } } }); } /** * Zips three double iterators into a single stream until all of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the other, the specified valueForNoneA, valueForNoneB or valueForNoneC is used. * * @param the type of the result * @param a the first double iterator * @param b the second double iterator * @param c the third double iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param valueForNoneC the value to use if the third iterator runs out of values * @param zipFunction the function to combine triples of values from the iterators * @return a stream of combined values */ public static Stream zip(final DoubleIterator a, final DoubleIterator b, final DoubleIterator c, final double valueForNoneA, final double valueForNoneB, final double valueForNoneC, final DoubleTriFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final DoubleIterator iterA = a == null ? DoubleIterator.empty() : a; private final DoubleIterator iterB = b == null ? DoubleIterator.empty() : b; private final DoubleIterator iterC = c == null ? DoubleIterator.empty() : c; @Override public boolean hasNext() { return iterA.hasNext() || iterB.hasNext() || iterC.hasNext(); } @Override public R next() { if (iterA.hasNext()) { return zipFunction.apply(iterA.nextDouble(), iterB.hasNext() ? iterB.nextDouble() : valueForNoneB, iterC.hasNext() ? iterC.nextDouble() : valueForNoneC); } else if (iterB.hasNext()) { return zipFunction.apply(valueForNoneA, iterB.nextDouble(), iterC.hasNext() ? iterC.nextDouble() : valueForNoneC); } else { return zipFunction.apply(valueForNoneA, valueForNoneB, iterC.nextDouble()); } } }); } /** * Zips two double streams into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. 
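 *
 * <p>Example (an illustrative sketch, assuming {@code DoubleStream.of} and {@code toList()}):
 * <pre>{@code
 * DoubleStream a = DoubleStream.of(1, 2, 3, 4);
 * DoubleStream b = DoubleStream.of(10, 20);
 *
 * // b is exhausted after two elements, so valueForNoneB (here 0) is used for the remaining steps
 * List<Double> result = Stream.zip(a, b, 0, 0, (x, y) -> x + y).toList();
 * // => [11.0, 22.0, 3.0, 4.0]  (the result is as long as the longer input)
 * }</pre>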
* * @param the type of the result * @param a the first double stream * @param b the second double stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param zipFunction the function to combine pairs of values from the streams * @return a stream of combined values */ public static Stream zip(final DoubleStream a, final DoubleStream b, final double valueForNoneA, final double valueForNoneB, final DoubleBiFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), valueForNoneA, valueForNoneB, zipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three double streams into a single stream until all of them run out of values. * Each set of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the others, the specified valueForNoneA, valueForNoneB, or valueForNoneC is used. * * @param the type of the result * @param a the first double stream * @param b the second double stream * @param c the third double stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param valueForNoneC the value to use if the third stream runs out of values * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final DoubleStream a, final DoubleStream b, final DoubleStream c, final double valueForNoneA, final double valueForNoneB, final double valueForNoneC, final DoubleTriFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), iterate(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction)) .onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips a collection of double streams into a single stream until all of them run out of values. * Each set of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the others, the specified valuesForNone are used. 
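 *
 * <p>Example (an illustrative sketch, assuming {@code DoubleStream.of} and {@code toList()};
 * note that {@code valuesForNone} must contain exactly one default per stream in the collection):
 * <pre>{@code
 * Collection<DoubleStream> streams = Arrays.asList(
 *         DoubleStream.of(1, 2, 3),
 *         DoubleStream.of(10, 20));
 *
 * double[] valuesForNone = {0, -1};   // one default value per stream
 *
 * List<Double> result = Stream.zip(streams, valuesForNone, args -> args[0] + args[1]).toList();
 * // => [11.0, 22.0, 2.0]  (the second stream is padded with -1 for the last step)
 * }</pre>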
* * @param the type of the result * @param c the collection of double streams * @param valuesForNone the values to use if the streams run out of values * @param zipFunction the function to combine sets of values from the streams * @return a stream of combined values */ public static Stream zip(final Collection c, final double[] valuesForNone, final DoubleNFunction zipFunction) { if (N.isEmpty(c)) { return Stream.empty(); } final int len = c.size(); if (len != valuesForNone.length) { throw new IllegalArgumentException("The size of 'valuesForNone' must be same as the size of the collection of iterator"); } final DoubleStream[] ss = c.toArray(new DoubleStream[len]); final DoubleIterator[] iters = new DoubleIterator[len]; for (int i = 0; i < len; i++) { iters[i] = iterate(ss[i]); } return new IteratorStream<>(new ObjIteratorEx() { @Override public boolean hasNext() { for (int i = 0; i < len; i++) { if (iters[i] != null) { if (iters[i].hasNext()) { return true; } else if (iters[i] != null) { iters[i] = null; ss[i].close(); } } } return false; } @Override public R next() { final double[] args = new double[len]; boolean hasNext = false; for (int i = 0; i < len; i++) { if (iters[i] != null && iters[i].hasNext()) { hasNext = true; args[i] = iters[i].nextDouble(); } else { args[i] = valuesForNone[i]; } } if (!hasNext) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return zipFunction.apply(args); } }).onClose(newCloseHandler(c)); } /** * Zips two arrays into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param
the type of the elements in the first array * @param the type of the elements in the second array * @param the type of the result * @param a the first array * @param b the second array * @param zipFunction the function to combine pairs of values from the arrays * @return a stream of combined values * @see N#zip(Object[], Object[], BiFunction) * @see Fn#pair() * @see Fn#tuple2() */ public static Stream zip(final A[] a, final B[] b, final BiFunction zipFunction) { return zip(ObjIteratorEx.of(a), ObjIteratorEx.of(b), zipFunction); } /** * Zips three arrays into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the elements in the first array * @param the type of the elements in the second array * @param the type of the elements in the third array * @param the type of the result * @param a the first array * @param b the second array * @param c the third array * @param zipFunction the function to combine triples of values from the arrays * @return a stream of combined values * @see N#zip(Object[], Object[], Object[], TriFunction) * @see Fn#triple() * @see Fn#tuple3() */ public static Stream zip(final A[] a, final B[] b, final C[] c, final TriFunction zipFunction) { return zip(ObjIteratorEx.of(a), ObjIteratorEx.of(b), ObjIteratorEx.of(c), zipFunction); } /** * Zips two iterables into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the elements in the first iterable * @param the type of the elements in the second iterable * @param the type of the result * @param a the first iterable * @param b the second iterable * @param zipFunction the function to combine pairs of values from the iterables * @return a stream of combined values * @see N#zip(Iterable, Iterable, BiFunction) * @see Fn#pair() * @see Fn#tuple2() */ public static Stream zip(final Iterable a, final Iterable b, final BiFunction zipFunction) { return zip(N.iterate(a), N.iterate(b), zipFunction); } /** * Zips three iterables into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the elements in the first iterable * @param the type of the elements in the second iterable * @param the type of the elements in the third iterable * @param the type of the result * @param a the first iterable * @param b the second iterable * @param c the third iterable * @param zipFunction the function to combine triples of values from the iterables * @return a stream of combined values * @see N#zip(Iterable, Iterable, Iterable, TriFunction) * @see Fn#triple() * @see Fn#tuple3() */ public static Stream zip(final Iterable a, final Iterable b, final Iterable c, final TriFunction zipFunction) { return zip(N.iterate(a), N.iterate(b), N.iterate(c), zipFunction); } /** * Zips two iterators into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. 
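 *
 * <p>Example (an illustrative sketch; {@code Arrays.asList(...).iterator()} simply stands in for
 * any pair of iterators):
 * <pre>{@code
 * Iterator<String> names = Arrays.asList("a", "b", "c").iterator();
 * Iterator<Integer> nums = Arrays.asList(1, 2).iterator();
 *
 * List<String> result = Stream.zip(names, nums, (s, n) -> s + n).toList();
 * // => ["a1", "b2"]  (stops as soon as the shorter iterator is exhausted)
 * }</pre>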
* * @param the type of the elements in the first iterator * @param the type of the elements in the second iterator * @param the type of the result * @param a the first iterator * @param b the second iterator * @param zipFunction the function to combine pairs of values from the iterators * @return a stream of combined values * @see N#zip(Iterable, Iterable, BiFunction) * @see Fn#pair() * @see Fn#tuple2() */ public static Stream zip(final Iterator a, final Iterator b, final BiFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final Iterator iterA = a == null ? ObjIterator. empty() : (Iterator) a; private final Iterator iterB = b == null ? ObjIterator. empty() : (Iterator) b; @Override public boolean hasNext() { return iterA.hasNext() && iterB.hasNext(); } @Override public R next() { return zipFunction.apply(iterA.next(), iterB.next()); } }); } /** * Zips three iterators into a single stream until one of them runs out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * * @param the type of the elements in the first iterator * @param the type of the elements in the second iterator * @param the type of the elements in the third iterator * @param the type of the result * @param a the first iterator * @param b the second iterator * @param c the third iterator * @param zipFunction the function to combine triples of values from the iterators * @return a stream of combined values * @see N#zip(Iterable, Iterable, Iterable, TriFunction) * @see Fn#triple() * @see Fn#tuple3() */ public static Stream zip(final Iterator a, final Iterator b, final Iterator c, final TriFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final Iterator iterA = a == null ? ObjIterator. empty() : (Iterator) a; private final Iterator iterB = b == null ? ObjIterator. empty() : (Iterator) b; private final Iterator iterC = c == null ? ObjIterator. empty() : (Iterator) c; @Override public boolean hasNext() { return iterA.hasNext() && iterB.hasNext() && iterC.hasNext(); } @Override public R next() { return zipFunction.apply(iterA.next(), iterB.next(), iterC.next()); } }); } /** * Zips two streams into a single stream until one of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * * @param the type of the elements in the first stream * @param the type of the elements in the second stream * @param the type of the result * @param a the first stream * @param b the second stream * @param zipFunction the function to combine pairs of values from the streams * @return a stream of combined values * @see N#zip(Iterable, Iterable, BiFunction) * @see Fn#pair() * @see Fn#tuple2() */ public static Stream zip(final Stream a, final Stream b, final BiFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), zipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three streams into a single stream until one of them runs out of values. * Each triplet of values is combined into a single value using the supplied zipFunction. 
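 *
 * <p>Example (an illustrative sketch, assuming {@code Stream.of} and {@code toList()}):
 * <pre>{@code
 * Stream<String> a = Stream.of("x", "y", "z");
 * Stream<Integer> b = Stream.of(1, 2, 3, 4);
 * Stream<Boolean> c = Stream.of(true, false, true);
 *
 * List<String> result = Stream.zip(a, b, c, (s, n, flag) -> s + n + (flag ? "!" : "")).toList();
 * // => ["x1!", "y2", "z3!"]  (the extra element of b is ignored)
 * }</pre>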
* * @param the type of the elements in the first stream * @param the type of the elements in the second stream * @param the type of the elements in the third stream * @param the type of the result * @param a the first stream * @param b the second stream * @param c the third stream * @param zipFunction the function to combine triplets of values from the streams * @return a stream of combined values * @see N#zip(Iterable, Iterable, Iterable, TriFunction) * @see Fn#triple() * @see Fn#tuple3() */ public static Stream zip(final Stream a, final Stream b, final Stream c, final TriFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), iterate(c), zipFunction)).onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips multiple streams into a single stream until one of them runs out of values. * Each list of values is combined into a single value using the supplied zipFunction. * * @param the type of the elements in the streams * @param the type of the result * @param streams the collection of stream to be zipped * @param zipFunction the function to combine lists of values from the streams * @return a stream of combined values */ public static Stream zip(final Collection> streams, final Function, ? extends R> zipFunction) { //noinspection resource return ((Stream) zipIterators(iterateAll(streams), zipFunction)).onClose(newCloseHandler(streams)); } /** * Zips multiple iterables into a single stream until one of them runs out of values. * Each list of values is combined into a single value using the supplied zipFunction. * * @param the type of the elements in the iterables * @param the type of the result * @param iterables the collection of iterable to be zipped * @param zipFunction the function to combine lists of values from the iterables * @return a stream of combined values */ public static Stream zipIterables(final Collection> iterables, final Function, ? extends R> zipFunction) { return zipIterators(N.iterateAll(iterables), zipFunction); } /** * Zips multiple iterators into a single stream until one of them runs out of values. * Each list of values is combined into a single value using the supplied zipFunction. * * @param the type of the elements in the iterators * @param the type of the result * @param iterators the collection of iterator to be zipped * @param zipFunction the function to combine lists of values from the iterators * @return a stream of combined values */ public static Stream zipIterators(final Collection> iterators, final Function, ? extends R> zipFunction) { if (N.isEmpty(iterators)) { return Stream.empty(); } final int len = iterators.size(); final Iterator[] iters = iterators.toArray(new Iterator[len]); return new IteratorStream<>(new ObjIteratorEx<>() { @Override public boolean hasNext() { for (int i = 0; i < len; i++) { if (!iters[i].hasNext()) { return false; } } return true; } @Override public R next() { final Object[] args = new Object[len]; for (int i = 0; i < len; i++) { args[i] = iters[i].next(); } return zipFunction.apply(Arrays.asList((T[]) args)); } }); } /** * Zips two arrays into a single stream until all of them runs out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one array runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. 
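 *
 * <p>Example (an illustrative sketch, assuming {@code toList()}):
 * <pre>{@code
 * String[] keys = {"a", "b", "c"};
 * Integer[] values = {1, 2};
 *
 * // the shorter array is padded with valueForNoneB (here 0)
 * List<String> entries = Stream.zip(keys, values, "?", 0, (k, v) -> k + "=" + v).toList();
 * // => ["a=1", "b=2", "c=0"]
 * }</pre>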
* * @param the type of the elements in the first array * @param the type of the elements in the second array * @param the type of the result * @param a the first array * @param b the second array * @param valueForNoneA the value to use if the first array runs out of values * @param valueForNoneB the value to use if the second array runs out of values * @param zipFunction the function to combine pairs of values from the arrays * @return a stream of combined values * @see N#zip(Object[], Object[], Object, Object, BiFunction) */ public static Stream zip(final A[] a, final B[] b, final A valueForNoneA, final B valueForNoneB, final BiFunction zipFunction) { return zip(ObjIteratorEx.of(a), ObjIteratorEx.of(b), valueForNoneA, valueForNoneB, zipFunction); } /** * Zips three arrays into a single stream until all of them run out of values. * Each triplet of values is combined into a single value using the supplied zipFunction. * If one array runs out of values before the others, the specified valueForNoneA, valueForNoneB, or valueForNoneC is used. * * @param the type of the elements in the first array * @param the type of the elements in the second array * @param the type of the elements in the third array * @param the type of the result * @param a the first array * @param b the second array * @param c the third array * @param valueForNoneA the value to use if the first array runs out of values * @param valueForNoneB the value to use if the second array runs out of values * @param valueForNoneC the value to use if the third array runs out of values * @param zipFunction the function to combine triplets of values from the arrays * @return a stream of combined values * @see N#zip(Object[], Object[], Object[], Object, Object, Object, TriFunction) */ public static Stream zip(final A[] a, final B[] b, final C[] c, final A valueForNoneA, final B valueForNoneB, final C valueForNoneC, final TriFunction zipFunction) { return zip(ObjIteratorEx.of(a), ObjIteratorEx.of(b), ObjIteratorEx.of(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction); } /** * Zips two iterables into a single stream until all of them run out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one iterable runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. * * @param the type of the elements in the first iterable * @param the type of the elements in the second iterable * @param the type of the result * @param a the first iterable * @param b the second iterable * @param valueForNoneA the value to use if the first iterable runs out of values * @param valueForNoneB the value to use if the second iterable runs out of values * @param zipFunction the function to combine pairs of values from the iterables * @return a stream of combined values * @see N#zip(Iterable, Iterable, Object, Object, BiFunction) */ public static Stream zip(final Iterable a, final Iterable b, final A valueForNoneA, final B valueForNoneB, final BiFunction zipFunction) { return zip(N.iterate(a), N.iterate(b), valueForNoneA, valueForNoneB, zipFunction); } /** * Zips three iterables into a single stream until all of them run out of values. * Each triplet of values is combined into a single value using the supplied zipFunction. * If one iterable runs out of values before the others, the specified valueForNoneA, valueForNoneB, or valueForNoneC is used. 
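 *
 * <p>Example (an illustrative sketch; {@code Arrays.asList} just provides the three iterables):
 * <pre>{@code
 * Iterable<String> a = Arrays.asList("a", "b", "c");
 * Iterable<Integer> b = Arrays.asList(1, 2);
 * Iterable<Character> c = Arrays.asList('x');
 *
 * List<String> result = Stream.zip(a, b, c, "-", 0, '?', (s, n, ch) -> s + n + ch).toList();
 * // => ["a1x", "b2?", "c0?"]  (each exhausted input is padded with its valueForNone)
 * }</pre>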
* * @param the type of the elements in the first iterable * @param the type of the elements in the second iterable * @param the type of the elements in the third iterable * @param the type of the result * @param a the first iterable * @param b the second iterable * @param c the third iterable * @param valueForNoneA the value to use if the first iterable runs out of values * @param valueForNoneB the value to use if the second iterable runs out of values * @param valueForNoneC the value to use if the third iterable runs out of values * @param zipFunction the function to combine triplets of values from the iterables * @return a stream of combined values * @see N#zip(Iterable, Iterable, Iterable, Object, Object, Object, TriFunction) */ public static Stream zip(final Iterable a, final Iterable b, final Iterable c, final A valueForNoneA, final B valueForNoneB, final C valueForNoneC, final TriFunction zipFunction) { return zip(N.iterate(a), N.iterate(b), N.iterate(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction); } /** * Zips two iterators into a single stream until all of them run out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. * * @param the type of the elements in the first iterator * @param the type of the elements in the second iterator * @param the type of the result * @param a the first iterator * @param b the second iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param zipFunction the function to combine pairs of values from the iterators * @return a stream of combined values * @see N#zip(Iterable, Iterable, Object, Object, BiFunction) */ public static Stream zip(final Iterator a, final Iterator b, final A valueForNoneA, final B valueForNoneB, final BiFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final Iterator iterA = a == null ? ObjIterator. empty() : (Iterator) a; private final Iterator iterB = b == null ? ObjIterator. empty() : (Iterator) b; @Override public boolean hasNext() { return iterA.hasNext() || iterB.hasNext(); } @Override public R next() { if (iterA.hasNext()) { return zipFunction.apply(iterA.next(), iterB.hasNext() ? iterB.next() : valueForNoneB); } else { return zipFunction.apply(valueForNoneA, iterB.next()); } } }); } /** * Zips three iterators into a single stream until all of them run out of values. * Each triple of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the others, the specified valueForNoneA, valueForNoneB, or valueForNoneC is used. 
* * @param the type of the elements in the first iterator * @param the type of the elements in the second iterator * @param the type of the elements in the third iterator * @param the type of the result * @param a the first iterator * @param b the second iterator * @param c the third iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param valueForNoneC the value to use if the third iterator runs out of values * @param zipFunction the function to combine triples of values from the iterators * @return a stream of combined values * @see N#zip(Iterable, Iterable, Iterable, Object, Object, Object, TriFunction) */ public static Stream zip(final Iterator a, final Iterator b, final Iterator c, final A valueForNoneA, final B valueForNoneB, final C valueForNoneC, final TriFunction zipFunction) { return new IteratorStream<>(new ObjIteratorEx<>() { private final Iterator iterA = a == null ? ObjIterator. empty() : (Iterator) a; private final Iterator iterB = b == null ? ObjIterator. empty() : (Iterator) b; private final Iterator iterC = c == null ? ObjIterator. empty() : (Iterator) c; @Override public boolean hasNext() { return iterA.hasNext() || iterB.hasNext() || iterC.hasNext(); } @Override public R next() { if (iterA.hasNext()) { return zipFunction.apply(iterA.next(), iterB.hasNext() ? iterB.next() : valueForNoneB, iterC.hasNext() ? iterC.next() : valueForNoneC); } else if (iterB.hasNext()) { return zipFunction.apply(valueForNoneA, iterB.next(), iterC.hasNext() ? iterC.next() : valueForNoneC); } else { return zipFunction.apply(valueForNoneA, valueForNoneB, iterC.next()); } } }); } /** * Zips two streams into a single stream until all of them run out of values. * Each pair of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the other, the specified valueForNoneA or valueForNoneB is used. * * @param the type of the elements in the first stream * @param the type of the elements in the second stream * @param the type of the result * @param a the first stream * @param b the second stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param zipFunction the function to combine pairs of values from the streams * @return a stream of combined values * @see N#zip(Iterable, Iterable, Object, Object, BiFunction) */ public static Stream zip(final Stream a, final Stream b, final A valueForNoneA, final B valueForNoneB, final BiFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), valueForNoneA, valueForNoneB, zipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three streams into a single stream until all of them run out of values. * Each triplet of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the others, the specified valueForNoneA, valueForNoneB, or valueForNoneC is used. 
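 *
 * <p>Example (an illustrative sketch; closing the zipped stream also triggers the close handlers
 * registered for the three source streams by the implementation below):
 * <pre>{@code
 * try (Stream<String> zipped = Stream.zip(
 *         Stream.of("a", "b"),
 *         Stream.of(1, 2, 3),
 *         Stream.of(true),
 *         "-", 0, false,
 *         (s, n, flag) -> s + n + flag)) {
 *     zipped.forEach(System.out::println);   // a1true, b2false, -3false
 * }
 * }</pre>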
* * @param the type of the elements in the first stream * @param the type of the elements in the second stream * @param the type of the elements in the third stream * @param the type of the result * @param a the first stream * @param b the second stream * @param c the third stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param valueForNoneC the value to use if the third stream runs out of values * @param zipFunction the function to combine triplets of values from the streams * @return a stream of combined values * @see N#zip(Iterable, Iterable, Iterable, Object, Object, Object, TriFunction) */ public static Stream zip(final Stream a, final Stream b, final Stream c, final A valueForNoneA, final B valueForNoneB, final C valueForNoneC, final TriFunction zipFunction) { return ((Stream) zip(iterate(a), iterate(b), iterate(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction)) .onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips multiple streams into a single stream until all of them run out of values. * Each list of values is combined into a single value using the supplied zipFunction. * If one stream runs out of values before the others, the specified valueForNone is used. * * @param the type of the elements in the streams * @param the type of the result * @param streams the collection of stream to be zipped * @param valuesForNone the value to use if any stream runs out of values * @param zipFunction the function to combine lists of values from the streams * @return a stream of combined values */ public static Stream zip(final Collection> streams, final List valuesForNone, final Function, ? extends R> zipFunction) { //noinspection resource return (Stream) zipIterators(iterateAll(streams), valuesForNone, zipFunction).onClose(newCloseHandler(streams)); } /** * Zips multiple iterables into a single stream until all of them runs out of values. * Each list of values is combined into a single value using the supplied zipFunction. * If one iterable runs out of values before the others, the specified valuesForNone is used. * * @param the type of the elements in the iterables * @param the type of the result * @param iterables the collection of iterable to be zipped * @param valuesForNone the values to use if an iterable runs out of values * @param zipFunction the function to combine lists of values from the iterables * @return a stream of combined values */ public static Stream zipIterables(final Collection> iterables, final List valuesForNone, final Function, ? extends R> zipFunction) { return zipIterators(N.iterateAll(iterables), valuesForNone, zipFunction); } /** * Zips multiple iterators into a single stream until all of them runs out of values. * Each list of values is combined into a single value using the supplied zipFunction. * If one iterator runs out of values before the others, the specified valuesForNone is used. * * @param the type of the elements in the iterators * @param the type of the result * @param iterators the collection of iterator to be zipped * @param valuesForNone the values to use if an iterator runs out of values * @param zipFunction the function to combine lists of values from the iterators * @return a stream of combined values */ public static Stream zipIterators(final Collection> iterators, final List valuesForNone, final Function, ? 
extends R> zipFunction) { if (N.isEmpty(iterators)) { return Stream.empty(); } final int len = iterators.size(); if (len != valuesForNone.size()) { throw new IllegalArgumentException("The size of 'valuesForNone' must be same as the size of the collection of iterator"); } final Iterator[] iters = iterators.toArray(new Iterator[len]); return new IteratorStream<>(new ObjIteratorEx<>() { @Override public boolean hasNext() { for (int i = 0; i < len; i++) { if (iters[i] != null) { if (iters[i].hasNext()) { return true; } else if (iters[i] != null) { iters[i] = null; } } } return false; } @Override public R next() { final Object[] args = new Object[len]; boolean hasNext = false; for (int i = 0; i < len; i++) { if (iters[i] != null && iters[i].hasNext()) { hasNext = true; args[i] = iters[i].next(); } else { args[i] = valuesForNone.get(i); } } if (!hasNext) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } return zipFunction.apply(Arrays.asList((T[]) args)); } }); } /** * Zips two iterables into a single stream until one of them runs out of values. * Each pair of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. * * @param the type of the elements in the first iterable * @param the type of the elements in the second iterable * @param the type of the result * @param a the first iterable * @param b the second iterable * @param zipFunction the function to combine pairs of values from the iterables * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @return a stream of combined values */ public static Stream parallelZip(final Iterable a, final Iterable b, final BiFunction zipFunction, final int maxThreadNumForZipFunction) { return parallelZip(N.iterate(a), N.iterate(b), zipFunction, maxThreadNumForZipFunction); } /** * Zips two iterators into a single stream until one of them runs out of values. * Each pair of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. * * @param the type of the elements in the first iterator * @param the type of the elements in the second iterator * @param the type of the result * @param a the first iterator * @param b the second iterator * @param zipFunction the function to combine pairs of values from the iterators * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @return a stream of combined values */ public static Stream parallelZip(final Iterator a, final Iterator b, final BiFunction zipFunction, final int maxThreadNumForZipFunction) { N.checkArgPositive(maxThreadNumForZipFunction, cs.maxThreadNumForZipFunction); if (maxThreadNumForZipFunction == 1) { return zip(a, b, zipFunction); } final Supplier> supplier = () -> { final Iterator iterA = a == null ? ObjIterator.empty() : (Iterator) a; final Iterator iterB = b == null ? 
ObjIterator.empty() : (Iterator) b; final int maxThreadNum = checkMaxThreadNum(maxThreadNumForZipFunction, 0, DEFAULT_ASYNC_EXECUTOR); final List> iters = new ArrayList<>(maxThreadNum); final MutableBoolean onGoing = MutableBoolean.of(true); for (int i = 0; i < maxThreadNum; i++) { final ObjIteratorEx iter = new ObjIteratorEx<>() { private A nextA = null; private B nextB = null; private boolean hasNext = false; @Override public boolean hasNext() { if (!hasNext && onGoing.isTrue()) { synchronized (iters) { if (iterA.hasNext()) { nextA = iterA.next(); } else { onGoing.setFalse(); return false; } if (iterB.hasNext()) { nextB = iterB.next(); } else { onGoing.setFalse(); return false; } hasNext = true; } } return hasNext; } @Override public R next() { if (!hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } hasNext = false; return zipFunction.apply(nextA, nextB); } }; iters.add(iter); } return new IteratorStream<>(parallelConcatIterators(iters, iters.size(), 0, false, DEFAULT_ASYNC_EXECUTOR), false, null, null); }; //noinspection resource return just(supplier).flatMap(Supplier::get); } /** * Zips two streams into a single stream until one of them runs out of values. * Each pair of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. * * @param the type of the elements in the first stream * @param the type of the elements in the second stream * @param the type of the result * @param a the first stream * @param b the second stream * @param zipFunction the function to combine pairs of values from the streams * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @return a stream of combined values * @see Stream#buffered() * @see Stream#buffered(int) */ public static Stream parallelZip(final Stream a, final Stream b, final BiFunction zipFunction, final int maxThreadNumForZipFunction) { return ((Stream) parallelZip(iterate(a), iterate(b), zipFunction, maxThreadNumForZipFunction)).onClose(newCloseHandler(a, b)); } /** * Zips three iterables into a single stream until one of them runs out of values. * Each triplet of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. * * @param the type of the elements in the first iterable * @param the type of the elements in the second iterable * @param the type of the elements in the third iterable * @param the type of the result * @param a the first iterable * @param b the second iterable * @param c the third iterable * @param zipFunction the function to combine triplets of values from the iterables * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @return a stream of combined values */ public static Stream parallelZip(final Iterable a, final Iterable b, final Iterable c, final TriFunction zipFunction, final int maxThreadNumForZipFunction) { return parallelZip(N.iterate(a), N.iterate(b), N.iterate(c), zipFunction, maxThreadNumForZipFunction); } /** * Zips three iterators into a single stream until one of them runs out of values. * Each triplet of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. 
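 *
 * <p>Example (an illustrative sketch; {@code enrich} stands for some hypothetical, expensive
 * per-triplet computation that makes running the zip function on several threads worthwhile):
 * <pre>{@code
 * Iterator<Long> ids = Arrays.asList(1L, 2L, 3L, 4L).iterator();
 * Iterator<String> names = Arrays.asList("a", "b", "c", "d").iterator();
 * Iterator<Integer> scores = Arrays.asList(10, 20, 30, 40).iterator();
 *
 * List<String> rows = Stream.parallelZip(ids, names, scores,
 *             (id, name, score) -> enrich(id, name, score), // expensive work, run concurrently
 *             4)                                            // up to 4 threads for the zip function
 *         .toList();
 * }</pre>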
* * @param the type of the elements in the first iterator * @param the type of the elements in the second iterator * @param the type of the elements in the third iterator * @param the type of the result * @param a the first iterator * @param b the second iterator * @param c the third iterator * @param zipFunction the function to combine triplets of values from the iterators * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @return a stream of combined values */ public static Stream parallelZip(final Iterator a, final Iterator b, final Iterator c, final TriFunction zipFunction, final int maxThreadNumForZipFunction) { N.checkArgPositive(maxThreadNumForZipFunction, cs.maxThreadNumForZipFunction); if (maxThreadNumForZipFunction == 1) { return zip(a, b, c, zipFunction); } final Supplier> supplier = () -> { final Iterator iterA = a == null ? ObjIterator.empty() : (Iterator) a; final Iterator iterB = b == null ? ObjIterator.empty() : (Iterator) b; final Iterator iterC = c == null ? ObjIterator.empty() : (Iterator) c; final int maxThreadNum = checkMaxThreadNum(maxThreadNumForZipFunction, 0, DEFAULT_ASYNC_EXECUTOR); final List> iters = new ArrayList<>(maxThreadNum); final MutableBoolean onGoing = MutableBoolean.of(true); for (int i = 0; i < maxThreadNum; i++) { final ObjIteratorEx iter = new ObjIteratorEx<>() { private A nextA = null; private B nextB = null; private C nextC = null; private boolean hasNext = false; @Override public boolean hasNext() { if (!hasNext && onGoing.isTrue()) { synchronized (iters) { if (iterA.hasNext()) { nextA = iterA.next(); } else { onGoing.setFalse(); return false; } if (iterB.hasNext()) { nextB = iterB.next(); } else { onGoing.setFalse(); return false; } if (iterC.hasNext()) { nextC = iterC.next(); } else { onGoing.setFalse(); return false; } hasNext = true; } } return hasNext; } @Override public R next() { if (!hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } hasNext = false; return zipFunction.apply(nextA, nextB, nextC); } }; iters.add(iter); } return new IteratorStream<>(parallelConcatIterators(iters, iters.size(), 0, false, DEFAULT_ASYNC_EXECUTOR), false, null, null); }; //noinspection resource return just(supplier).flatMap(Supplier::get); } /** * Zips three streams into a single stream until one of them runs out of values. * Each triplet of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. * * @param the type of the elements in the first stream * @param the type of the elements in the second stream * @param the type of the elements in the third stream * @param the type of the result * @param a the first stream * @param b the second stream * @param c the third stream * @param zipFunction the function to combine triplets of values from the streams * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @return a stream of combined values * @see Stream#buffered() * @see Stream#buffered(int) */ public static Stream parallelZip(final Stream a, final Stream b, final Stream c, final TriFunction zipFunction, final int maxThreadNumForZipFunction) { return ((Stream) parallelZip(iterate(a), iterate(b), iterate(c), zipFunction, maxThreadNumForZipFunction)) .onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips two iterables into a single stream until all of them run out of values. * Each pair of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. 
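 *
 * <p>Example (an illustrative sketch; with {@code maxThreadNumForZipFunction == 1} this call
 * effectively falls back to the sequential {@code zip} with default values):
 * <pre>{@code
 * Iterable<String> a = Arrays.asList("a", "b", "c");
 * Iterable<Integer> b = Arrays.asList(1, 2);
 *
 * // the shorter input is padded with valueForNoneB (here 0); the zip function may run on up to 2 threads
 * List<String> result = Stream.parallelZip(a, b, "?", 0, (s, n) -> s + n, 2).toList();
 * // => contains "a1", "b2" and "c0"
 * }</pre>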
* * @param the type of the elements in the first iterable * @param the type of the elements in the second iterable * @param the type of the result * @param a the first iterable * @param b the second iterable * @param valueForNoneA the value to use if the first iterable runs out of values * @param valueForNoneB the value to use if the second iterable runs out of values * @param zipFunction the function to combine pairs of values from the iterables * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @return a stream of combined values */ public static Stream parallelZip(final Iterable a, final Iterable b, final A valueForNoneA, final B valueForNoneB, final BiFunction zipFunction, final int maxThreadNumForZipFunction) { return parallelZip(N.iterate(a), N.iterate(b), valueForNoneA, valueForNoneB, zipFunction, maxThreadNumForZipFunction); } /** * Zips two iterators into a single stream until all of them run out of values. * Each pair of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. * * @param the type of the elements in the first iterator * @param the type of the elements in the second iterator * @param the type of the result * @param a the first iterator * @param b the second iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param zipFunction the function to combine pairs of values from the iterators * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @return a stream of combined values */ public static Stream parallelZip(final Iterator a, final Iterator b, final A valueForNoneA, final B valueForNoneB, final BiFunction zipFunction, final int maxThreadNumForZipFunction) { N.checkArgPositive(maxThreadNumForZipFunction, cs.maxThreadNumForZipFunction); if (maxThreadNumForZipFunction == 1) { return zip(a, b, valueForNoneA, valueForNoneB, zipFunction); } final Supplier> supplier = () -> { final Iterator iterA = a == null ? ObjIterator.empty() : (Iterator) a; final Iterator iterB = b == null ? ObjIterator.empty() : (Iterator) b; final int maxThreadNum = checkMaxThreadNum(maxThreadNumForZipFunction, 0, DEFAULT_ASYNC_EXECUTOR); final List> iters = new ArrayList<>(maxThreadNum); final MutableBoolean onGoing = MutableBoolean.of(true); for (int i = 0; i < maxThreadNum; i++) { final ObjIteratorEx iter = new ObjIteratorEx<>() { private A nextA = null; private B nextB = null; private boolean hasNext = false; @Override public boolean hasNext() { if (!hasNext && onGoing.isTrue()) { synchronized (iters) { if (iterA.hasNext()) { nextA = iterA.next(); hasNext = true; } else { nextA = valueForNoneA; } if (iterB.hasNext()) { nextB = iterB.next(); hasNext = true; } else { nextB = valueForNoneB; } if (!hasNext) { onGoing.setFalse(); } } } return hasNext; } @Override public R next() { if (!hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } hasNext = false; return zipFunction.apply(nextA, nextB); } }; iters.add(iter); } return new IteratorStream<>(parallelConcatIterators(iters, iters.size(), 0, false, DEFAULT_ASYNC_EXECUTOR), false, null, null); }; //noinspection resource return just(supplier).flatMap(Supplier::get); } /** * Zips two streams into a single stream until all of them run out of values. * Each pair of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. 
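 *
 * <p>Example (an illustrative sketch; {@code fetchNames()} and {@code fetchScores()} are
 * hypothetical stream sources, and {@code buffered(int)} is the pre-fetching operation referenced
 * by the {@code @see} tags below):
 * <pre>{@code
 * Stream<String> names = fetchNames().buffered(64);     // keep the sources ahead of the zip
 * Stream<Integer> scores = fetchScores().buffered(64);
 *
 * List<String> rows = Stream.parallelZip(names, scores,
 *         "<none>", 0,                           // defaults once one side is exhausted
 *         (name, score) -> name + ": " + score,  // potentially expensive mapping, run concurrently
 *         4)
 *     .toList();
 * }</pre>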
* * @param the type of the elements in the first stream * @param the type of the elements in the second stream * @param the type of the result * @param a the first stream * @param b the second stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param zipFunction the function to combine pairs of values from the streams * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @see Stream#buffered() * @see Stream#buffered(int) */ public static Stream parallelZip(final Stream a, final Stream b, final A valueForNoneA, final B valueForNoneB, final BiFunction zipFunction, final int maxThreadNumForZipFunction) { return ((Stream) parallelZip(iterate(a), iterate(b), valueForNoneA, valueForNoneB, zipFunction, maxThreadNumForZipFunction)) .onClose(newCloseHandler(a, b)); } /** * Zips three iterables into a single stream until all of them run out of values. * Each triplet of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. * * @param the type of the elements in the first iterable * @param the type of the elements in the second iterable * @param the type of the elements in the third iterable * @param the type of the result * @param a the first iterable * @param b the second iterable * @param c the third iterable * @param valueForNoneA the value to use if the first iterable runs out of values * @param valueForNoneB the value to use if the second iterable runs out of values * @param valueForNoneC the value to use if the third iterable runs out of values * @param zipFunction the function to combine triplets of values from the iterables * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @return a stream of combined values */ public static Stream parallelZip(final Iterable a, final Iterable b, final Iterable c, final A valueForNoneA, final B valueForNoneB, final C valueForNoneC, final TriFunction zipFunction, final int maxThreadNumForZipFunction) { return parallelZip(N.iterate(a), N.iterate(b), N.iterate(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction, maxThreadNumForZipFunction); } /** * Zips three iterators into a single stream until all of them run out of values. * Each triplet of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. * * @param the type of the elements in the first iterator * @param the type of the elements in the second iterator * @param the type of the elements in the third iterator * @param the type of the result * @param a the first iterator * @param b the second iterator * @param c the third iterator * @param valueForNoneA the value to use if the first iterator runs out of values * @param valueForNoneB the value to use if the second iterator runs out of values * @param valueForNoneC the value to use if the third iterator runs out of values * @param zipFunction the function to combine triplets of values from the iterators * @param maxThreadNumForZipFunction the max thread number for the zipFunction. 
* @return a stream of combined values */ public static Stream parallelZip(final Iterator a, final Iterator b, final Iterator c, final A valueForNoneA, final B valueForNoneB, final C valueForNoneC, final TriFunction zipFunction, final int maxThreadNumForZipFunction) { N.checkArgPositive(maxThreadNumForZipFunction, cs.maxThreadNumForZipFunction); if (maxThreadNumForZipFunction == 1) { return zip(a, b, c, valueForNoneA, valueForNoneB, valueForNoneC, zipFunction); } final Supplier> supplier = () -> { final Iterator iterA = a == null ? ObjIterator.empty() : (Iterator) a; final Iterator iterB = b == null ? ObjIterator.empty() : (Iterator) b; final Iterator iterC = c == null ? ObjIterator.empty() : (Iterator) c; final int maxThreadNum = checkMaxThreadNum(maxThreadNumForZipFunction, 0, DEFAULT_ASYNC_EXECUTOR); final List> iters = new ArrayList<>(maxThreadNum); final MutableBoolean onGoing = MutableBoolean.of(true); for (int i = 0; i < maxThreadNum; i++) { final ObjIteratorEx iter = new ObjIteratorEx<>() { private A nextA = null; private B nextB = null; private C nextC = null; private boolean hasNext = false; @Override public boolean hasNext() { if (!hasNext && onGoing.isTrue()) { synchronized (iters) { if (iterA.hasNext()) { nextA = iterA.next(); hasNext = true; } else { nextA = valueForNoneA; } if (iterB.hasNext()) { nextB = iterB.next(); hasNext = true; } else { nextB = valueForNoneB; } if (iterC.hasNext()) { nextC = iterC.next(); hasNext = true; } else { nextC = valueForNoneC; } if (!hasNext) { onGoing.setFalse(); } } } return hasNext; } @Override public R next() { if (!hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } hasNext = false; return zipFunction.apply(nextA, nextB, nextC); } }; iters.add(iter); } return new IteratorStream<>(parallelConcatIterators(iters, iters.size(), 0, false, DEFAULT_ASYNC_EXECUTOR), false, null, null); }; //noinspection resource return just(supplier).flatMap(Supplier::get); } /** * Zips three streams into a single stream until all of them run out of values. * Each triplet of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. * * @param the type of the elements in the first stream * @param the type of the elements in the second stream * @param the type of the elements in the third stream * @param the type of the result * @param a the first stream * @param b the second stream * @param c the third stream * @param valueForNoneA the value to use if the first stream runs out of values * @param valueForNoneB the value to use if the second stream runs out of values * @param valueForNoneC the value to use if the third stream runs out of values * @param zipFunction the function to combine triplets of values from the streams * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @return a stream of combined values * @see Stream#buffered() * @see Stream#buffered(int) */ public static Stream parallelZip(final Stream a, final Stream b, final Stream c, final A valueForNoneA, final B valueForNoneB, final C valueForNoneC, final TriFunction zipFunction, final int maxThreadNumForZipFunction) { return ((Stream) parallelZip(iterate(a), iterate(b), iterate(c), valueForNoneA, valueForNoneB, valueForNoneC, zipFunction, maxThreadNumForZipFunction)).onClose(newCloseHandler(Array.asList(a, b, c))); } /** * Zips a collection of stream into a single stream until one of them runs out of values. 
* Each list of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. * * @param the type of the elements in the streams * @param the type of the result * @param streams the collection of stream * @param zipFunction the function to combine lists of values from the streams * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @return a stream of combined values * @see Stream#buffered() * @see Stream#buffered(int) */ public static Stream parallelZip(final Collection> streams, final Function, ? extends R> zipFunction, final int maxThreadNumForZipFunction) { //noinspection resource return ((Stream) parallelZipIterators(iterateAll(streams), zipFunction, maxThreadNumForZipFunction)).onClose(newCloseHandler(streams)); } /** * Zips multiple streams into a single stream until all of them run out of values. * Each list of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. * * @param the type of the elements in the streams * @param the type of the result * @param streams the collection of stream * @param valuesForNone the values to use if any stream runs out of values * @param zipFunction the function to combine lists of values from the streams * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @return a stream of combined values * @see Stream#buffered() * @see Stream#buffered(int) */ public static Stream parallelZip(final Collection> streams, final List valuesForNone, final Function, ? extends R> zipFunction, final int maxThreadNumForZipFunction) { //noinspection resource return ((Stream) parallelZipIterators(iterateAll(streams), valuesForNone, zipFunction, maxThreadNumForZipFunction)) .onClose(newCloseHandler(streams)); } /** * Zips multiple iterables into a single stream until one of them run out of values. * Each list of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. * * @param the type of the elements in the iterables * @param the type of the result * @param iterables the collection of Iterable * @param zipFunction the function to combine lists of values from the iterables * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @return a stream of combined values * @see N#iterateAll(Collection) */ public static Stream parallelZipIterables(final Collection> iterables, final Function, ? extends R> zipFunction, final int maxThreadNumForZipFunction) { return parallelZipIterators(N.iterateAll(iterables), zipFunction, maxThreadNumForZipFunction); } /** * Zips multiple iterables into a single stream until all of them run out of values. * Each list of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. * * @param the type of the elements in the iterables * @param the type of the result * @param iterables the collection of Iterable * @param valuesForNone the values to use if any iterable runs out of values * @param zipFunction the function to combine lists of values from the iterables * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @return a stream of combined values * @see N#iterateAll(Collection) */ public static Stream parallelZipIterables(final Collection> iterables, final List valuesForNone, final Function, ? 
extends R> zipFunction, final int maxThreadNumForZipFunction) { return parallelZipIterators(N.iterateAll(iterables), valuesForNone, zipFunction, maxThreadNumForZipFunction); } /** * Zips multiple iterators into a single stream until one of them run out of values. * Each list of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. * * @param the type of the elements in the iterators * @param the type of the result * @param iterators the collection of iterator * @param zipFunction the function to combine lists of values from the iterators * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @return a stream of combined values */ public static Stream parallelZipIterators(final Collection> iterators, final Function, ? extends R> zipFunction, final int maxThreadNumForZipFunction) { N.checkArgPositive(maxThreadNumForZipFunction, cs.maxThreadNumForZipFunction); if (N.isEmpty(iterators)) { return Stream.empty(); } else if (maxThreadNumForZipFunction == 1) { return zipIterators(iterators, zipFunction); } final Supplier> supplier = () -> { final int len = iterators.size(); final Iterator[] iterArray = iterators.toArray(new Iterator[len]); for (int i = 0; i < len; i++) { if (iterArray[i] == null) { iterArray[i] = ObjIterator.empty(); } } final int maxThreadNum = checkMaxThreadNum(maxThreadNumForZipFunction, 0, DEFAULT_ASYNC_EXECUTOR); final List> iters = new ArrayList<>(maxThreadNum); final MutableBoolean onGoing = MutableBoolean.of(true); for (int i = 0; i < maxThreadNum; i++) { final ObjIteratorEx iter = new ObjIteratorEx<>() { private Object[] next = null; private boolean hasNext = false; @Override public boolean hasNext() { if (!hasNext && onGoing.isTrue()) { synchronized (iters) { next = new Object[len]; for (int i = 0; i < len; i++) { if (iterArray[i].hasNext()) { next[i] = iterArray[i].next(); } else { onGoing.setFalse(); return false; } } hasNext = true; } } return hasNext; } @Override public R next() { if (!hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } hasNext = false; return zipFunction.apply(Arrays.asList((T[]) next)); } }; iters.add(iter); } return new IteratorStream<>(parallelConcatIterators(iters, iters.size(), 0, false, DEFAULT_ASYNC_EXECUTOR), false, null, null); }; //noinspection resource return just(supplier).flatMap(Supplier::get); } /** * Zips multiple iterators into a single stream until all of them run out of values. * Each list of values is combined into a single value by the provided {@code zipFunction}, which will be executed by multiple threads. * * @param the type of the elements in the iterators * @param the type of the result * @param iterators the collection of iterator * @param valuesForNone the values to use if any iterator runs out of values * @param zipFunction the function to combine lists of values from the iterators * @param maxThreadNumForZipFunction the max thread number for the zipFunction. * @return a stream of combined values */ public static Stream parallelZipIterators(final Collection> iterators, final List valuesForNone, final Function, ? 
extends R> zipFunction, final int maxThreadNumForZipFunction) { N.checkArgPositive(maxThreadNumForZipFunction, cs.maxThreadNumForZipFunction); if (N.size(iterators) != N.size(valuesForNone)) { throw new IllegalArgumentException("The size of 'valuesForNone' must be same as the size of the collection of iterator"); } if (N.isEmpty(iterators)) { return Stream.empty(); } else if (maxThreadNumForZipFunction == 1) { return zipIterators(iterators, valuesForNone, zipFunction); } final Supplier> supplier = () -> { final int len = iterators.size(); final Iterator[] iterArray = iterators.toArray(new Iterator[len]); for (int i = 0; i < len; i++) { if (iterArray[i] == null) { iterArray[i] = ObjIterator.empty(); } } final int maxThreadNum = checkMaxThreadNum(maxThreadNumForZipFunction, 0, DEFAULT_ASYNC_EXECUTOR); final List> iters = new ArrayList<>(maxThreadNum); final MutableBoolean onGoing = MutableBoolean.of(true); for (int i = 0; i < maxThreadNum; i++) { final ObjIteratorEx iter = new ObjIteratorEx<>() { private Object[] next = null; private boolean hasNext = false; @Override public boolean hasNext() { if (!hasNext && onGoing.isTrue()) { synchronized (iters) { next = new Object[len]; for (int i = 0; i < len; i++) { if (iterArray[i].hasNext()) { next[i] = iterArray[i].next(); hasNext = true; } else { next[i] = valuesForNone.get(i); } } if (!hasNext) { onGoing.setFalse(); } } } return hasNext; } @Override public R next() { if (!hasNext()) { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } hasNext = false; return zipFunction.apply(Arrays.asList((T[]) next)); } }; iters.add(iter); } return new IteratorStream<>(parallelConcatIterators(iters, iters.size(), 0, false, DEFAULT_ASYNC_EXECUTOR), false, null, null); }; //noinspection resource return just(supplier).flatMap(Supplier::get); } /** * Merges two arrays into a single stream based on the provided nextSelector function. * The nextSelector function determines which element from the two arrays should be selected next. * * @param the type of the elements in the arrays * @param a the first array to be merged. It should be ordered. * @param b the second array to be merged. It should be ordered. * @param nextSelector a function that determines the next element to be selected from the two arrays * @return a stream containing the merged elements from the two arrays * @see N#merge(Object[], Object[], BiFunction) */ public static Stream merge(final T[] a, final T[] b, final BiFunction nextSelector) { if (N.isEmpty(a)) { return of(b); } else if (N.isEmpty(b)) { return of(a); } return new IteratorStream<>(new ObjIteratorEx<>() { private final int lenA = a.length; private final int lenB = b.length; private int cursorA = 0; private int cursorB = 0; @Override public boolean hasNext() { return cursorA < lenA || cursorB < lenB; } @Override public T next() { if (cursorA < lenA) { if ((cursorB >= lenB) || (nextSelector.apply(a[cursorA], b[cursorB]) == MergeResult.TAKE_FIRST)) { return a[cursorA++]; } else { return b[cursorB++]; } } else if (cursorB < lenB) { return b[cursorB++]; } else { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } } }); } /** * Merges three arrays into a single stream based on the provided nextSelector function. * The nextSelector function determines which element from the three arrays should be selected next. * * @param the type of the elements in the arrays * @param a the first array to be merged. It should be ordered. * @param b the second array to be merged. It should be ordered. * @param c the third array to be merged. 
It should be ordered. * @param nextSelector a function that determines the next element to be selected from the three arrays * @return a stream containing the merged elements from the three arrays * @see N#merge(Object[], Object[], BiFunction) */ public static Stream merge(final T[] a, final T[] b, final T[] c, final BiFunction nextSelector) { //noinspection resource return merge(merge(a, b, nextSelector).iteratorEx(), N.iterate(c), nextSelector); } /** * Merges two iterables into a single stream based on the provided nextSelector function. * The nextSelector function determines which element from the two iterables should be selected next. * * @param the type of the elements in the iterables * @param a the first iterable to be merged. It should be ordered. * @param b the second iterable to be merged. It should be ordered. * @param nextSelector a function that determines the next element to be selected from the two iterables * @return a stream containing the merged elements from the two iterables * @see N#merge(Iterable, Iterable, BiFunction) */ public static Stream merge(final Iterable a, final Iterable b, final BiFunction nextSelector) { return merge(N.iterate(a), N.iterate(b), nextSelector); } /** * Merges three iterables into a single stream based on the provided nextSelector function. * The nextSelector function determines which element from the three iterables should be selected next. * * @param the type of the elements in the iterables * @param a the first iterable to be merged. It should be ordered. * @param b the second iterable to be merged. It should be ordered. * @param c the third iterable to be merged. It should be ordered. * @param nextSelector a function that determines the next element to be selected from the three iterables * @return a stream containing the merged elements from the three iterables * @see N#merge(Iterable, Iterable, BiFunction) */ public static Stream merge(final Iterable a, final Iterable b, final Iterable c, final BiFunction nextSelector) { return merge(N.iterate(a), N.iterate(b), N.iterate(c), nextSelector); } /** * Merges two iterators into a single stream based on the provided nextSelector function. * The nextSelector function determines which element from the two iterators should be selected next. * * @param the type of the elements in the iterators * @param a the first iterator to be merged. It should be ordered. * @param b the second iterator to be merged. It should be ordered. * @param nextSelector a function that determines the next element to be selected from the two iterators * @return a stream containing the merged elements from the two iterators * @see N#merge(Iterable, Iterable, BiFunction) */ public static Stream merge(final Iterator a, final Iterator b, final BiFunction nextSelector) { return new IteratorStream<>(new ObjIteratorEx<>() { private final Iterator iterA = a == null ? ObjIterator. empty() : (Iterator) a; private final Iterator iterB = b == null ? ObjIterator. 
empty() : (Iterator) b; private T nextA = null; private T nextB = null; private boolean hasNextA = false; private boolean hasNextB = false; @Override public boolean hasNext() { return hasNextA || hasNextB || iterA.hasNext() || iterB.hasNext(); } @Override public T next() { if (hasNextA) { if (iterB.hasNext()) { if (nextSelector.apply(nextA, (nextB = iterB.next())) == MergeResult.TAKE_FIRST) { hasNextA = false; hasNextB = true; return nextA; } else { return nextB; } } else { hasNextA = false; return nextA; } } else if (hasNextB) { if (iterA.hasNext()) { if (nextSelector.apply((nextA = iterA.next()), nextB) == MergeResult.TAKE_FIRST) { return nextA; } else { hasNextA = true; hasNextB = false; return nextB; } } else { hasNextB = false; return nextB; } } else if (iterA.hasNext()) { if (iterB.hasNext()) { if (nextSelector.apply((nextA = iterA.next()), (nextB = iterB.next())) == MergeResult.TAKE_FIRST) { hasNextB = true; return nextA; } else { hasNextA = true; return nextB; } } else { return iterA.next(); } } else if (iterB.hasNext()) { return iterB.next(); } else { throw new NoSuchElementException(ERROR_MSG_FOR_NO_SUCH_EX); } } }); } /** * Merges three iterators into a single stream based on the provided nextSelector function. * The nextSelector function determines which element from the three iterators should be selected next. * * @param the type of the elements in the iterators * @param a the first iterator to be merged. It should be ordered. * @param b the second iterator to be merged. It should be ordered. * @param c the third iterator to be merged. It should be ordered. * @param nextSelector a function that determines the next element to be selected from the three iterators * @return a stream containing the merged elements from the three iterators * @see N#merge(Iterable, Iterable, BiFunction) */ public static Stream merge(final Iterator a, final Iterator b, final Iterator c, final BiFunction nextSelector) { //noinspection resource return merge(merge(a, b, nextSelector).iteratorEx(), c, nextSelector); } /** * Merges two streams into a single stream based on the provided nextSelector function. * The nextSelector function determines which element from the two streams should be selected next. * * @param the type of the elements in the streams * @param a the first stream to be merged. It should be ordered. * @param b the second stream to be merged. It should be ordered. * @param nextSelector a function that determines the next element to be selected from the two streams * @return a stream containing the merged elements from the two streams */ public static Stream merge(final Stream a, final Stream b, final BiFunction nextSelector) { return merge(iterate(a), iterate(b), nextSelector).onClose(newCloseHandler(a, b)); } /** * Merges three streams into a single stream based on the provided nextSelector function. * The nextSelector function determines which element from the three streams should be selected next. * * @param the type of the elements in the streams * @param a the first stream to be merged. It should be ordered. * @param b the second stream to be merged. It should be ordered. * @param c the third stream to be merged. It should be ordered. 
     * @param nextSelector a function that determines the next element to be selected from the three streams
     * @return a stream containing the merged elements from the three streams
     */
    public static <T> Stream<T> merge(final Stream<? extends T> a, final Stream<? extends T> b, final Stream<? extends T> c,
            final BiFunction<? super T, ? super T, MergeResult> nextSelector) {
        return merge(merge(a, b, nextSelector), c, nextSelector);
    }

    /**
     * Merges a collection of streams into a single stream based on the provided nextSelector function.
     * The nextSelector function determines which element from the streams should be selected next.
     *
     * @param <T> the type of the elements in the streams
     * @param streams the collection of streams to be merged. Each stream should be ordered.
     * @param nextSelector a function that determines the next element to be selected from the streams
     * @return a stream containing the merged elements from the collection of streams
     */
    public static <T> Stream<T> merge(final Collection<? extends Stream<? extends T>> streams,
            final BiFunction<? super T, ? super T, MergeResult> nextSelector) throws IllegalArgumentException {
        N.checkArgNotNull(nextSelector);

        if (N.isEmpty(streams)) {
            return empty();
        } else if (streams.size() == 1) {
            return (Stream<T>) streams.iterator().next();
        } else if (streams.size() == 2) {
            final Iterator<? extends Stream<? extends T>> iter = streams.iterator();
            return merge(iter.next(), iter.next(), nextSelector);
        }

        final Iterator<? extends Stream<? extends T>> iter = streams.iterator();
        Stream<T> result = merge(iter.next(), iter.next(), nextSelector);

        while (iter.hasNext()) {
            result = merge(result, iter.next(), nextSelector);
        }

        return result;
    }

    /**
     * Merges a collection of iterables into a single stream based on the provided nextSelector function.
     * The nextSelector function determines which element from the iterables should be selected next.
     *
     * @param <T> the type of the elements in the iterables
     * @param iterables the collection of iterables to be merged. Each iterable should be ordered.
     * @param nextSelector a function that determines the next element to be selected from the iterables
     * @return a stream containing the merged elements from the collection of iterables
     */
    public static <T> Stream<T> mergeIterables(final Collection<? extends Iterable<? extends T>> iterables,
            final BiFunction<? super T, ? super T, MergeResult> nextSelector) throws IllegalArgumentException {
        return mergeIterators(N.iterateAll(iterables), nextSelector);
    }

    /**
     * Merges a collection of iterators into a single stream based on the provided nextSelector function.
     * The nextSelector function determines which element from the iterators should be selected next.
     *
     * @param <T> the type of the elements in the iterators
     * @param iterators the collection of iterators to be merged. Each iterator should be ordered.
     * @param nextSelector a function that determines the next element to be selected from the iterators
     * @return a stream containing the merged elements from the collection of iterators
     */
    public static <T> Stream<T> mergeIterators(final Collection<? extends Iterator<? extends T>> iterators,
            final BiFunction<? super T, ? super T, MergeResult> nextSelector) throws IllegalArgumentException {
        N.checkArgNotNull(nextSelector);

        if (N.isEmpty(iterators)) {
            return empty();
        } else if (iterators.size() == 1) {
            return of(iterators.iterator().next());
        } else if (iterators.size() == 2) {
            final Iterator<? extends Iterator<? extends T>> iter = iterators.iterator();
            return merge(iter.next(), iter.next(), nextSelector);
        }

        final Iterator<? extends Iterator<? extends T>> iter = iterators.iterator();
        Stream<T> result = merge(iter.next(), iter.next(), nextSelector);

        while (iter.hasNext()) {
            result = merge(result.iteratorEx(), iter.next(), nextSelector);
        }

        return result;
    }

    /**
     * Merges a collection of streams into a single stream in parallel.
     * The input streams are first merged pairwise into buffered intermediate streams by multiple threads,
     * and the last two (or three) remaining streams are then merged lazily into the returned stream.
     * This method is not fully lazy and may cause an {@code OutOfMemoryError} if too many elements are buffered in the intermediate streams.
     * Consider using {@code merge}, which is fully lazy.
     *
     * @param <T> the type of the elements in the streams
     * @param streams the collection of streams to be merged. Each stream should be ordered.
     * @param nextSelector a function to determine which element should be selected as the next element.
     * @return a stream containing the merged elements from the collection of streams
     * @see #merge(Collection, BiFunction)
     */
    public static <T> Stream<T> parallelMerge(final Collection<? extends Stream<? extends T>> streams,
            final BiFunction<? super T, ? super T, MergeResult> nextSelector) {
        return parallelMerge(streams, nextSelector, DEFAULT_MAX_THREAD_NUM);
    }

    /**
     * Merges a collection of streams into a single stream in parallel.
     * The input streams are first merged pairwise into buffered intermediate streams by multiple threads,
     * and the last two (or three) remaining streams are then merged lazily into the returned stream.
     * This method is not fully lazy and may cause an {@code OutOfMemoryError} if too many elements are buffered in the intermediate streams.
     * Consider using {@code merge}, which is fully lazy.
     *
     * @param <T> the type of the elements in the streams
     * @param streams the collection of streams to be merged. Each stream should be ordered.
     * @param nextSelector a function to determine which element should be selected as the next element.
     * @param maxThreadNum the max thread number for the parallel merge.
     * @return a stream containing the merged elements from the collection of streams
     * @see #merge(Collection, BiFunction)
     */
    public static <T> Stream<T> parallelMerge(final Collection<? extends Stream<? extends T>> streams,
            final BiFunction<? super T, ? super T, MergeResult> nextSelector, final int maxThreadNum) throws IllegalArgumentException {
        N.checkArgument(maxThreadNum > 0, "'maxThreadNum' must not be less than 1");

        if (maxThreadNum == 1) {
            return merge(streams, nextSelector);
        } else if (N.isEmpty(streams)) {
            return empty();
        } else if (streams.size() == 1) {
            return (Stream<T>) streams.iterator().next();
        } else if (streams.size() == 2) {
            final Iterator<? extends Stream<? extends T>> iter = streams.iterator();
            return merge(iter.next(), iter.next(), nextSelector);
        } else if (streams.size() == 3) {
            final Iterator<? extends Stream<? extends T>> iter2 = streams.iterator();
            //noinspection resource
            return merge(merge(iter2.next(), iter2.next(), nextSelector).buffered(), iter2.next(), nextSelector);
        }

        final Supplier<Stream<T>> supplier = () -> {
            final Queue<Stream<T>> queue = N.newLinkedList();

            for (final Stream<? extends T> e : streams) {
                queue.add((Stream<T>) e);
            }

            final Holder<Throwable> eHolder = new Holder<>();
            final MutableInt cnt = MutableInt.of(streams.size());
            final List<ContinuableFuture<Void>> futureList = new ArrayList<>(streams.size() - 1);
            final int threadNum = N.min(maxThreadNum, streams.size() / 2);
            AsyncExecutor asyncExecutorToUse = checkAsyncExecutor(DEFAULT_ASYNC_EXECUTOR, threadNum, 0);

            // TODO Warning: Deadlock could happen if the total number of threads started by this stream and its upstream is bigger than StreamBase.CORE_THREAD_POOL_SIZE (1024).
            // If the total number of threads started by this stream and its downstream is big, please specify its own {@code Executor} by {@code parallel(..., Executor)}.
            // UPDATE: this deadlock problem has been resolved by using BaseStream.execute(...)
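            // Each of the threadNum workers below repeatedly polls two streams from the queue (while more
            // than three sources remain overall), merges them eagerly into an array-backed stream via
            // toArray(), and offers the result back to the queue. The last two or three streams are left
            // in the queue so the final merge after the workers complete can stay lazy.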
            for (int i = 0; i < threadNum; i++) {
                asyncExecutorToUse = execute(asyncExecutorToUse, threadNum, 0, i, futureList, () -> {
                    Stream<T> a = null;
                    Stream<T> b = null;
                    Stream<T> c = null;

                    try {
                        while (eHolder.value() == null) {
                            synchronized (queue) {
                                if (cnt.value() > 3 && queue.size() > 1) {
                                    a = queue.poll();
                                    b = queue.poll();
                                    cnt.decrement();
                                } else {
                                    break;
                                }
                            }

                            c = Stream.of((T[]) merge(a, b, nextSelector).toArray());

                            synchronized (queue) {
                                queue.offer(c);
                            }
                        }
                    } catch (final Throwable e) { // NOSONAR
                        setError(eHolder, e);
                    }
                });
            }

            completeAndShutdownTempExecutor(futureList, eHolder, streams, asyncExecutorToUse);

            if (queue.size() == 2) {
                return merge(queue.poll(), queue.poll(), nextSelector);
            } else if (queue.size() == 3) {
                return merge(merge(queue.poll(), queue.poll(), nextSelector).buffered(), queue.poll(), nextSelector);
            } else {
                throw new IllegalStateException("Queue size should be 2 or 3 but it's " + queue.size());
            }
        };

        //noinspection resource
        return Stream.just(supplier).flatMap(Supplier::get);
    }

    /**
     * Merges a collection of iterables into a single stream in parallel.
     * The input iterables are first merged pairwise into intermediate iterators by multiple threads,
     * and the last two (or three) remaining sources are then merged lazily into the returned stream.
     * This method is not fully lazy and may cause an {@code OutOfMemoryError} if too many elements are buffered in the intermediate iterators.
     * Consider using {@code merge}, which is fully lazy.
     *
     * @param <T> the type of the elements in the iterables
     * @param iterables the collection of iterables to be merged. Each iterable should be ordered.
     * @param nextSelector a function to determine which element should be selected as the next element.
     * @return a stream containing the merged elements from the collection of iterables
     * @see #mergeIterables(Collection, BiFunction)
     */
    public static <T> Stream<T> parallelMergeIterables(final Collection<? extends Iterable<? extends T>> iterables,
            final BiFunction<? super T, ? super T, MergeResult> nextSelector) {
        return parallelMergeIterables(iterables, nextSelector, DEFAULT_MAX_THREAD_NUM);
    }

    /**
     * Merges a collection of iterables into a single stream in parallel.
     * The input iterables are first merged pairwise into intermediate iterators by multiple threads,
     * and the last two (or three) remaining sources are then merged lazily into the returned stream.
     * This method is not fully lazy and may cause an {@code OutOfMemoryError} if too many elements are buffered in the intermediate iterators.
     * Consider using {@code merge}, which is fully lazy.
     *
     * @param <T> the type of the elements in the iterables
     * @param iterables the collection of iterables to be merged. Each iterable should be ordered.
     * @param nextSelector a function to determine which element should be selected as the next element.
     * @param maxThreadNum the max thread number for the parallel merge.
     * @return a stream containing the merged elements from the collection of iterables
     * @see #mergeIterables(Collection, BiFunction)
     */
    public static <T> Stream<T> parallelMergeIterables(final Collection<? extends Iterable<? extends T>> iterables,
            final BiFunction<? super T, ? super T, MergeResult> nextSelector, final int maxThreadNum) throws IllegalArgumentException {
        return parallelMergeIterators(N.iterateAll(iterables), nextSelector, maxThreadNum);
    }

    /**
     * Merges a collection of iterators into a single stream in parallel.
     * The input iterators are first merged pairwise into intermediate iterators by multiple threads,
     * and the last two (or three) remaining iterators are then merged lazily into the returned stream.
     * This method is not fully lazy and may cause an {@code OutOfMemoryError} if too many elements are buffered in the intermediate iterators.
     * Consider using {@code merge}, which is fully lazy.
     *
     * @param <T> the type of the elements in the iterators
     * @param iterators the collection of iterators to be merged. Each iterator should be ordered.
     * @param nextSelector a function to determine which element should be selected as the next element.
     * @return a stream containing the merged elements from the collection of iterators
     * @see #mergeIterators(Collection, BiFunction)
     */
    public static <T> Stream<T> parallelMergeIterators(final Collection<? extends Iterator<? extends T>> iterators,
            final BiFunction<? super T, ? super T, MergeResult> nextSelector) {
        return parallelMergeIterators(iterators, nextSelector, DEFAULT_MAX_THREAD_NUM);
    }

    /**
     * Merges a collection of iterators into a single stream in parallel.
     * The input iterators are first merged pairwise into intermediate iterators by multiple threads,
     * and the last two (or three) remaining iterators are then merged lazily into the returned stream.
     * This method is not fully lazy and may cause an {@code OutOfMemoryError} if too many elements are buffered in the intermediate iterators.
     * Consider using {@code merge}, which is fully lazy.
     *
     * @param <T> the type of the elements in the iterators
     * @param iterators the collection of iterators to be merged. Each iterator should be ordered.
     * @param nextSelector a function to determine which element should be selected as the next element.
     * @param maxThreadNum the max thread number for the parallel merge.
     * @return a stream containing the merged elements from the collection of iterators
     * @see #mergeIterators(Collection, BiFunction)
     */
    public static <T> Stream<T> parallelMergeIterators(final Collection<? extends Iterator<? extends T>> iterators,
            final BiFunction<? super T, ? super T, MergeResult> nextSelector, final int maxThreadNum) throws IllegalArgumentException {
        N.checkArgument(maxThreadNum > 0, "'maxThreadNum' must not be less than 1");

        if (maxThreadNum == 1) {
            return mergeIterators(iterators, nextSelector);
        } else if (N.isEmpty(iterators)) {
            return empty();
        } else if (iterators.size() == 1) {
            return of(iterators.iterator().next());
        } else if (iterators.size() == 2) {
            final Iterator<? extends Iterator<? extends T>> iter = iterators.iterator();
            return merge(iter.next(), iter.next(), nextSelector);
        } else if (iterators.size() == 3) {
            final Iterator<? extends Iterator<? extends T>> iter2 = iterators.iterator();
            //noinspection resource
            return merge(merge(iter2.next(), iter2.next(), nextSelector).buffered().iteratorEx(), iter2.next(), nextSelector);
        }

        final Supplier<Stream<T>> supplier = () -> {
            final Queue<Iterator<? extends T>> queue = N.newLinkedList(iterators);
            final Holder<Throwable> eHolder = new Holder<>();
            final MutableInt cnt = MutableInt.of(iterators.size());
            final List<ContinuableFuture<Void>> futureList = new ArrayList<>(iterators.size() - 1);
            final int threadNum = N.min(maxThreadNum, iterators.size() / 2);
            AsyncExecutor asyncExecutorToUse = checkAsyncExecutor(DEFAULT_ASYNC_EXECUTOR, threadNum, 0);

            // TODO Warning: Deadlock could happen if the total number of threads started by this stream and its upstream is bigger than StreamBase.CORE_THREAD_POOL_SIZE (1024).
            // If the total number of threads started by this stream and its downstream is big, please specify its own {@code Executor} by {@code parallel(..., Executor)}.
            // UPDATE: this deadlock problem has been resolved by using BaseStream.execute(...)
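            // The workers below shrink the queue pairwise: while more than three sources remain, two
            // iterators are polled, merged eagerly into an array-backed iterator, and offered back.
            // Leaving the last two or three sources untouched keeps the final merge below lazy, which is
            // why the queue is expected to hold exactly 2 or 3 entries once all futures complete.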
            for (int i = 0; i < threadNum; i++) {
                asyncExecutorToUse = execute(asyncExecutorToUse, threadNum, 0, i, futureList, () -> {
                    Iterator<? extends T> a = null;
                    Iterator<? extends T> b = null;
                    Iterator<? extends T> c = null;

                    try {
                        while (eHolder.value() == null) {
                            synchronized (queue) {
                                if (cnt.value() > 3 && queue.size() > 1) {
                                    a = queue.poll();
                                    b = queue.poll();
                                    cnt.decrement();
                                } else {
                                    break;
                                }
                            }

                            //noinspection resource
                            c = ObjIteratorEx.of((T[]) merge(a, b, nextSelector).toArray());

                            synchronized (queue) {
                                queue.offer(c);
                            }
                        }
                    } catch (final Throwable e) { // NOSONAR
                        setError(eHolder, e);
                    }
                });
            }

            completeAndShutdownTempExecutor(futureList, eHolder, null, asyncExecutorToUse);

            if (queue.size() == 2) {
                return merge(queue.poll(), queue.poll(), nextSelector);
            } else if (queue.size() == 3) {
                //noinspection resource
                return merge(merge(queue.poll(), queue.poll(), nextSelector).buffered().iteratorEx(), queue.poll(), nextSelector);
            } else {
                throw new IllegalStateException("Queue size should be 2 or 3 but it's " + queue.size());
            }
        };

        //noinspection resource
        return Stream.just(supplier).flatMap(Supplier::get);
    }

    public abstract static class StreamEx<T> extends Stream<T> {
        private StreamEx(final boolean sorted, final Comparator<? super T> cmp, final Collection<Runnable> closeHandlers) { //NOSONAR
            super(sorted, cmp, closeHandlers);
            // Factory class.
        }
    }
}
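The merge and parallelMerge families above all follow the same pattern: ordered inputs plus a nextSelector that decides which head element to take next. The snippet below is a minimal usage sketch, not part of the original source; it assumes MergeResult is com.landawn.abacus.util.MergeResult and exposes TAKE_SECOND alongside the TAKE_FIRST constant used above, and it relies only on factory/terminal methods (of, toList) that appear elsewhere in this class.

// Illustrative example only (hypothetical class name and data; not part of Stream.java).
import java.util.Arrays;
import java.util.List;

import com.landawn.abacus.util.MergeResult; // assumed location of the MergeResult enum
import com.landawn.abacus.util.stream.Stream;

public class MergeExamples {

    public static void main(String[] args) {
        final List<Integer> a = Arrays.asList(1, 3, 5);
        final List<Integer> b = Arrays.asList(2, 4, 6);

        // Lazy two-way merge of two ordered sources: take the smaller head each time.
        final List<Integer> merged = Stream
                .merge(a, b, (x, y) -> x <= y ? MergeResult.TAKE_FIRST : MergeResult.TAKE_SECOND)
                .toList(); // [1, 2, 3, 4, 5, 6]

        // Parallel merge of several ordered streams; pairs are pre-merged eagerly by worker threads,
        // so prefer merge(...) when the inputs are very large or unbounded.
        final List<Integer> parallelMerged = Stream
                .parallelMerge(
                        Arrays.asList(Stream.of(1, 4, 7), Stream.of(2, 5, 8), Stream.of(3, 6, 9)),
                        (x, y) -> x <= y ? MergeResult.TAKE_FIRST : MergeResult.TAKE_SECOND,
                        2) // at most 2 worker threads
                .toList(); // [1, 2, 3, 4, 5, 6, 7, 8, 9]

        System.out.println(merged);
        System.out.println(parallelMerged);
    }
}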