package cyclops.companion;

import com.oath.cyclops.internal.stream.*;
import com.oath.cyclops.internal.stream.operators.DebounceOperator;
import com.oath.cyclops.internal.stream.operators.MultiReduceOperator;
import com.oath.cyclops.internal.stream.operators.OnePerOperator;
import com.oath.cyclops.internal.stream.operators.RecoverOperator;
import com.oath.cyclops.internal.stream.spliterators.*;
import com.oath.cyclops.types.persistent.PersistentCollection;
import com.oath.cyclops.types.stream.Connectable;
import com.oath.cyclops.types.stream.NonPausableConnectable;
import com.oath.cyclops.types.traversable.Traversable;
import cyclops.control.Eval;
import cyclops.control.Maybe;
import cyclops.control.Option;
import cyclops.data.Seq;
import cyclops.control.Either;

import cyclops.data.Vector;

import cyclops.function.*;

import cyclops.reactive.ReactiveSeq;
import com.oath.cyclops.util.box.Mutable;

import com.oath.cyclops.types.stream.PausableConnectable;
import com.oath.cyclops.util.ExceptionSoftener;

import lombok.AllArgsConstructor;
import lombok.experimental.UtilityClass;
import lombok.val;
import cyclops.data.tuple.Tuple;
import cyclops.data.tuple.Tuple2;
import cyclops.data.tuple.Tuple3;
import cyclops.data.tuple.Tuple4;

import com.oath.cyclops.types.persistent.PersistentList;
import org.reactivestreams.Subscription;

import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.LockSupport;
import java.util.function.*;
import java.util.stream.*;

/**
 * Static utility methods for working with Java 8 Streams
 *
 * @author johnmcclean
 *
 */
@UtilityClass
public class Streams {



    public static <T> ReactiveSeq<ReactiveSeq<T>> combinations(int size, Object[] a) {

        final int fromIndex = 0;
        final int toIndex = a.length;

        final Iterator<ReactiveSeq<T>> iter = new Iterator<ReactiveSeq<T>>() {
            private final int[] indices = IntStream.range(fromIndex, fromIndex + size).toArray();

            @Override
            public boolean hasNext() {
                return indices[0] <= toIndex - size;
            }

            @Override
            public ReactiveSeq<T> next() {
                final List<T> result = new ArrayList<>(size);

                for (int idx : indices) {
                    result.add((T) a[idx]);
                }

                if (++indices[size - 1] == toIndex) {
                    for (int i = size - 1; i > 0; i--) {
                        if (indices[i] > toIndex - (size - i)) {
                            indices[i - 1]++;

                            for (int j = i; j < size; j++) {
                                indices[j] = indices[j - 1] + 1;
                            }
                        }
                    }
                }

                return ReactiveSeq.fromList(result);
            }
        };

        return ReactiveSeq.fromIterator(iter);
    }
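
    // A minimal usage sketch (not part of the original source), assuming the element type is
    // supplied explicitly: each inner ReactiveSeq is one size-k combination of the input array.
    //
    //   Streams.<Integer>combinations(2, new Object[] { 1, 2, 3 })
    //          .map(ReactiveSeq::toList)
    //          .forEach(System.out::println);   // [1, 2], [1, 3], [2, 3]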
    public static <T> ReactiveSeq<ReactiveSeq<T>> permutations(Object[] a) {



        final Iterator<ReactiveSeq<T>> iter = new Iterator<ReactiveSeq<T>>() {
            private final int[] indices = IntStream.range(0, a.length).toArray();
            private Option<ReactiveSeq<T>> next = Maybe.fromEval(Eval.later(this::first)).flatMap(i -> i);

            @Override
            public boolean hasNext() {
                return next.isPresent();
            }

            @Override
            public ReactiveSeq<T> next() {
                ReactiveSeq<T> res = next.orElse(null);
                next = Maybe.fromEval(Eval.later(this::genNext)).flatMap(i -> i);
                return res;
            }
            private Option<ReactiveSeq<T>> first() {
                return indices.length > 0 ? Option.some(ReactiveSeq.fromList(buildList())) : Option.none();
            }
            private Option<ReactiveSeq<T>> genNext() {
                return gen(findNextIndexByOrder());
            }

            private Option<ReactiveSeq<T>> gen(int next) {
                return next < 0 ? Option.none() : Option.some(ReactiveSeq.fromList(buildAndSwap(next)));

            }
            private void swapIndices(int i) {
                swap(i++, findMinIndex(i - 1));
                for (int j = indices.length - 1; i < j; i++, j--) {
                    swap(i, j);
                }
            }

            private int findNextIndexByOrder() {
                int i = indices.length - 2;
                for (; i >= 0 && indices[i] > indices[i + 1]; --i) {

                }
                return i;
            }
            private int findMinIndex(int startIdx) {
                int idx = startIdx + 1;
                int currentMinValue = indices[idx];
                int minIndex = idx;
                for (; idx < indices.length; idx++) {
                    if (indices[startIdx] < indices[idx] && indices[idx] < currentMinValue) {
                        currentMinValue = indices[idx];
                        minIndex = idx;
                    }
                }
                return minIndex;
            }
            private List<T> buildAndSwap(int next) {
                swapIndices(next);
                return buildList();
            }
            private List<T> buildList() {
                final List<T> result = new ArrayList<>(indices.length);
                for (int idx : indices) {
                    result.add((T) a[idx]);
                }
                return result;
            }

            private void swap(int a, int b) {
                int temp = indices[a];
                indices[a] = indices[b];
                indices[b] = temp;
            }

        };

        return ReactiveSeq.fromIterator(iter);
    }
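
    // A minimal usage sketch (not part of the original source): permutations are produced
    // lazily, starting from the order of the supplied array.
    //
    //   Streams.<String>permutations(new Object[] { "a", "b", "c" })
    //          .map(ReactiveSeq::toList)
    //          .forEach(System.out::println);   // [a, b, c], [a, c, b], [b, a, c], ...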
    /**
     * Perform a For Comprehension over a Stream, accepting 3 generating functions.
     * This results in a four level nested internal iteration over the provided Publishers.
     *
     *  
     * {@code
     *
     *   import static cyclops.companion.Streams.forEach4;
     *
          forEach4(IntStream.range(1,10).boxed(),
                   a-> Stream.iterate(a,i->i+1).limit(10),
                  (a,b) -> Stream.of(a+b),
                  (a,b,c) -> Stream.just(a+b+c),
                  Tuple::tuple)
     *
     * }
     * 
     *
     * @param value1 top level Stream
     * @param value2 Nested Stream
     * @param value3 Nested Stream
     * @param value4 Nested Stream
     * @param yieldingFunction Generates a result per combination
     * @return Stream with an element per combination of nested publishers generated by the yielding function
     */
    public static <T1, R1, R2, R3, R> Stream<R> forEach4(Stream<? extends T1> value1,
            Function<? super T1, ? extends Stream<R1>> value2,
            BiFunction<? super T1, ? super R1, ? extends Stream<R2>> value3,
            Function3<? super T1, ? super R1, ? super R2, ? extends Stream<R3>> value4,
            Function4<? super T1, ? super R1, ? super R2, ? super R3, ? extends R> yieldingFunction) {

        return value1.flatMap(in -> {
            Stream<R1> a = value2.apply(in);
            return a.flatMap(ina -> {
                Stream<R2> b = value3.apply(in, ina);
                return b.flatMap(inb -> {
                    Stream<R3> c = value4.apply(in, ina, inb);
                    return c.map(in2 -> yieldingFunction.apply(in, ina, inb, in2));
                });
            });
        });
    }

    /**
     * Perform a For Comprehension over a Stream, accepting 3 generating functions.
     * This results in a four level nested internal iteration over the provided Publishers.
     *
     * {@code
     *
     *  import static cyclops.companion.Streams.forEach4;
     *
     *  forEach4(IntStream.range(1,10).boxed(),
                 a-> Stream.iterate(a,i->i+1).limit(10),
                 (a,b) -> Stream.of(a+b),
                 (a,b,c) -> Stream.of(a+b+c),
                 (a,b,c,d) -> a+b+c+d <100,
                 Tuple::tuple);
     *
     * }
     * 
     *
     * @param value1 top level Stream
     * @param value2 Nested Stream
     * @param value3 Nested Stream
     * @param value4 Nested Stream
     * @param filterFunction A filtering function, keeps values where the predicate holds
     * @param yieldingFunction Generates a result per combination
     * @return Stream with an element per combination of nested publishers generated by the yielding function
     */
    public static <T1, R1, R2, R3, R> Stream<R> forEach4(Stream<? extends T1> value1,
            Function<? super T1, ? extends Stream<R1>> value2,
            BiFunction<? super T1, ? super R1, ? extends Stream<R2>> value3,
            Function3<? super T1, ? super R1, ? super R2, ? extends Stream<R3>> value4,
            Function4<? super T1, ? super R1, ? super R2, ? super R3, Boolean> filterFunction,
            Function4<? super T1, ? super R1, ? super R2, ? super R3, ? extends R> yieldingFunction) {

        return value1.flatMap(in -> {
            Stream<R1> a = value2.apply(in);
            return a.flatMap(ina -> {
                Stream<R2> b = value3.apply(in, ina);
                return b.flatMap(inb -> {
                    Stream<R3> c = value4.apply(in, ina, inb);
                    return c.filter(in2 -> filterFunction.apply(in, ina, inb, in2))
                            .map(in2 -> yieldingFunction.apply(in, ina, inb, in2));
                });
            });
        });
    }

    /**
     * Perform a For Comprehension over a Stream, accepting 2 generating functions.
     * This results in a three level nested internal iteration over the provided Publishers.
     *
     *
     * {@code
     *
     * import static cyclops.companion.Streams.forEach3;
     *
     * forEach3(IntStream.range(1,10).boxed(),
                a-> Stream.iterate(a,i->i+1).limit(10),
                (a,b) -> Stream.of(a+b),
                Tuple::tuple);
     *
     * }
     * 
     *
     *
     * @param value1 top level Stream
     * @param value2 Nested Stream
     * @param value3 Nested Stream
     * @param yieldingFunction Generates a result per combination
     * @return Stream with an element per combination of nested publishers generated by the yielding function
     */
    public static <T1, R1, R2, R> Stream<R> forEach3(Stream<? extends T1> value1,
            Function<? super T1, ? extends Stream<R1>> value2,
            BiFunction<? super T1, ? super R1, ? extends Stream<R2>> value3,
            Function3<? super T1, ? super R1, ? super R2, ? extends R> yieldingFunction) {

        return value1.flatMap(in -> {
            Stream<R1> a = value2.apply(in);
            return a.flatMap(ina -> {
                Stream<R2> b = value3.apply(in, ina);
                return b.map(in2 -> yieldingFunction.apply(in, ina, in2));
            });
        });
    }

    /**
     * Perform a For Comprehension over a Stream, accepting 2 generating functions.
     * This results in a three level nested internal iteration over the provided Publishers.
     *
     * {@code
     *
     * import static cyclops.companion.Streams.forEach3;
     *
     * forEach3(IntStream.range(1,10).boxed(),
               a-> Stream.iterate(a,i->i+1).limit(10),
              (a,b) -> Stream.of(a+b),
              (a,b,c) ->a+b+c<10,
              Tuple::tuple)
                .collect(Collectors.toList());
     * }
     * 
     *
     * @param value1 top level Stream
     * @param value2 Nested publisher
     * @param value3 Nested publisher
     * @param filterFunction A filtering function, keeps values where the predicate holds
     * @param yieldingFunction Generates a result per combination
     * @return
     */
    public static <T1, R1, R2, R> Stream<R> forEach3(Stream<? extends T1> value1,
            Function<? super T1, ? extends Stream<R1>> value2,
            BiFunction<? super T1, ? super R1, ? extends Stream<R2>> value3,
            Function3<? super T1, ? super R1, ? super R2, Boolean> filterFunction,
            Function3<? super T1, ? super R1, ? super R2, ? extends R> yieldingFunction) {

        return value1.flatMap(in -> {
            Stream<R1> a = value2.apply(in);
            return a.flatMap(ina -> {
                Stream<R2> b = value3.apply(in, ina);
                return b.filter(in2 -> filterFunction.apply(in, ina, in2))
                        .map(in2 -> yieldingFunction.apply(in, ina, in2));
            });
        });
    }

    /**
     * Perform a For Comprehension over a Stream, accepting an additional generating function.
     * This results in a two level nested internal iteration over the provided Publishers.
     *
     *
     * {@code
     *
     *  import static cyclops.companion.Streams.forEach2;
     *  forEach2(IntStream.range(1, 10).boxed(),
     *          i -> IntStream.range(i, 10).boxed(), Tuple::tuple)
            .forEach(System.out::println);

    //(1, 1)
    (1, 2)
    (1, 3)
    (1, 4)
    ...
     *
     * }
     *
     * @param value1 top level Stream
     * @param value2 Nested publisher
     * @param yieldingFunction Generates a result per combination
     * @return
     */
    public static <T, R1, R> Stream<R> forEach2(Stream<? extends T> value1,
            Function<? super T, ? extends Stream<R1>> value2,
            BiFunction<? super T, ? super R1, ? extends R> yieldingFunction) {

        return value1.flatMap(in -> {
            Stream<R1> a = value2.apply(in);
            return a.map(in2 -> yieldingFunction.apply(in, in2));
        });
    }

    /**
     *
     * {@code
     *
     *   import static cyclops.companion.Streams.forEach2;
     *
     *   forEach2(IntStream.range(1, 10).boxed(),
     *           i -> IntStream.range(i, 10).boxed(),
     *           (a,b) -> a>2 && b<10,
     *           Tuple::tuple)
           .forEach(System.out::println);

    //(3, 3)
    (3, 4)
    (3, 5)
    (3, 6)
    (3, 7)
    (3, 8)
    (3, 9)
    ...

     *
     * }
     *
     *
     * @param value1 top level Stream
     * @param value2 Nested publisher
     * @param filterFunction A filtering function, keeps values where the predicate holds
     * @param yieldingFunction Generates a result per combination
     * @return
     */
    public static <T, R1, R> Stream<R> forEach2(Stream<? extends T> value1,
            Function<? super T, ? extends Stream<R1>> value2,
            BiFunction<? super T, ? super R1, Boolean> filterFunction,
            BiFunction<? super T, ? super R1, ? extends R> yieldingFunction) {

        return value1.flatMap(in -> {
            Stream<R1> a = value2.apply(in);
            return a.filter(in2 -> filterFunction.apply(in, in2))
                    .map(in2 -> yieldingFunction.apply(in, in2));
        });
    }

    /**
     * Create an Optional containing a List materialized from a Stream
     *
     *
     * {@code
     *   Optional<Seq<Integer>> opt = Streams.streamToOptional(Stream.of(1,2,3));
     *
     *   //Optional[[1,2,3]]
     *
     * }
     * 
     *
     *
     * @param stream To convert into an Optional
     * @return Optional with a List of values
     */
    public final static <T> Optional<Seq<T>> streamToOptional(final Stream<T> stream) {

        final List<T> collected = stream.collect(java.util.stream.Collectors.toList());
        if (collected.size() == 0)
            return Optional.empty();
        return Optional.of(Seq.fromIterable(collected));
    }

    /**
     * Convert an Optional to a Stream
     *
     *
     * {@code
     *     Stream<Integer> stream = Streams.optionalToStream(Optional.of(1));
     *     //Stream[1]
     *
     *     Stream<Integer> zero = Streams.optionalToStream(Optional.empty());
     *     //Stream[]
     * }
     * 
* * @param optional Optional to convert to a Stream * @return Stream with a single value (if present) created from an Optional */ public final static Stream optionalToStream(final Optional optional) { if (optional.isPresent()) return Stream.of(optional.get()); return Stream.of(); } /** * Create a CompletableFuture containing a List materialized from a Stream * * @param stream To convert into an Optional * @return CompletableFuture with a List of values */ public final static CompletableFuture> streamToCompletableFuture(final Stream stream) { return CompletableFuture.completedFuture(stream.collect(Collectors.toList())); } /** * Convert a CompletableFuture to a Stream * * @param future CompletableFuture to convert * @return Stream with a single value created from a CompletableFuture */ public final static Stream completableFutureToStream(final CompletableFuture future) { return Stream.of(future.join()); } /** * Perform a forEach operation over the Stream, without closing it, consuming only the specified number of elements from * the Stream, at this time. More elements can be consumed later, by called request on the returned Subscription * *
     * {@code
     *     Subscription next = Streams.forEach(Stream.of(1,2,3,4),2,System.out::println);
     *
     *     System.out.println("First batch processed!");
     *
     *     next.request(2);
     *
     *      System.out.println("Second batch processed!");
     *
     *     //prints
     *     1
     *     2
     *     First batch processed!
     *     3
     *     4
     *     Second batch processed!
     * }
     * 
* * @param stream - the Stream to consume data from * @param x To consume from the Stream at this time * @param consumerElement To accept incoming events from the Stream * @return Subscription so that further processing can be continued or cancelled. */ public static Subscription forEach(final Stream stream, final long x, final Consumer consumerElement) { val t2 = FutureStreamUtils.forEachX(stream, x, consumerElement); t2._2().run(); return t2._1().join(); } /** * Perform a forEach operation over the Stream without closing it, capturing any elements and errors in the supplied consumers, but only consuming * the specified number of elements from the Stream, at this time. More elements can be consumed later, by called request on the returned Subscription *
     * {@code
     *     Subscription next = Streams.forEach(Stream.of(()->1,()->2,()->{throw new RuntimeException()},()->4)
     *                                  .map(Supplier::get),System.out::println, e->e.printStackTrace());
     *
     *     System.out.println("First batch processed!");
     *
     *     next.request(2);
     *
     *      System.out.println("Second batch processed!");
     *
     *     //prints
     *     1
     *     2
     *     First batch processed!
     *
     *     RuntimeException Stack Trace on System.err
     *
     *     4
     *     Second batch processed!
     * }
     * 
* * @param stream - the Stream to consume data from * @param x To consume from the Stream at this time * @param consumerElement To accept incoming elements from the Stream * @param consumerError To accept incoming processing errors from the Stream * @return Subscription so that further processing can be continued or cancelled. */ public static Subscription forEach(final Stream stream, final long x, final Consumer consumerElement, final Consumer consumerError) { val t2 = FutureStreamUtils.forEachXWithError(stream, x, consumerElement, consumerError); t2._2().run(); return t2._1().join(); } /** * Perform a forEach operation over the Stream without closing it, capturing any elements and errors in the supplied consumers, but only consuming * the specified number of elements from the Stream, at this time. More elements can be consumed later, by called request on the returned Subscription, * when the entire Stream has been processed an onComplete event will be recieved. * *
     * {@code
     *     Subscription next = Streams.forEach(Stream.of(()->1,()->2,()->{throw new RuntimeException()},()->4)
     *                                  .map(Supplier::get) ,System.out::println, e->e.printStackTrace(),()->System.out.println("the take!"));
     *
     *     System.out.println("First batch processed!");
     *
     *     next.request(2);
     *
     *      System.out.println("Second batch processed!");
     *
     *     //prints
     *     1
     *     2
     *     First batch processed!
     *
     *     RuntimeException Stack Trace on System.err
     *
     *     4
     *     Second batch processed!
     *     The take!
     * }
     * 
* @param stream - the Stream to consume data from * @param x To consume from the Stream at this time * @param consumerElement To accept incoming elements from the Stream * @param consumerError To accept incoming processing errors from the Stream * @param onComplete To run after an onComplete event * @return Subscription so that further processing can be continued or cancelled. */ public static Subscription forEach(final Stream stream, final long x, final Consumer consumerElement, final Consumer consumerError, final Runnable onComplete) { val t2 = FutureStreamUtils.forEachXEvents(stream, x, consumerElement, consumerError, onComplete); t2._2().run(); return t2._1().join(); } /** * Perform a forEach operation over the Stream capturing any elements and errors in the supplied consumers, *
     * {@code
     *     Subscription next = Streams.forEach(Stream.of(()->1,()->2,()->{throw new RuntimeException()},()->4)
     *                                  .map(Supplier::get),System.out::println, e->e.printStackTrace());
     *
     *     System.out.println("processed!");
     *
     *
     *
     *     //prints
     *     1
     *     2
     *     RuntimeException Stack Trace on System.err
     *     4
     *     processed!
     *
     * }
     * 
* @param stream - the Stream to consume data from * @param consumerElement To accept incoming elements from the Stream * @param consumerError To accept incoming processing errors from the Stream */ public static void forEach(final Stream stream, final Consumer consumerElement, final Consumer consumerError) { val t2 = FutureStreamUtils.forEachWithError(stream, consumerElement, consumerError); t2._2().run(); } /** * Perform a forEach operation over the Stream capturing any elements and errors in the supplied consumers * when the entire Stream has been processed an onComplete event will be recieved. * *
     * {@code
     *     Subscription next = Streams.forEach(Stream.of(()->1,()->2,()->{throw new RuntimeException()},()->4)
     *                                  .map(Supplier::get),System.out::println, e->e.printStackTrace(),()->System.out.println("the take!"));
     *
     *     System.out.println("processed!");
     *
     *
     *     //prints
     *     1
     *     2
     *     RuntimeException Stack Trace on System.err
     *      4
     *     processed!
     *
     *
     * }
     * 
* @param stream - the Stream to consume data from * @param consumerElement To accept incoming elements from the Stream * @param consumerError To accept incoming processing errors from the Stream * @param onComplete To run after an onComplete event * @return Subscription so that further processing can be continued or cancelled. */ public static void forEach(final Stream stream, final Consumer consumerElement, final Consumer consumerError, final Runnable onComplete) { val t2 = FutureStreamUtils.forEachEvent(stream, consumerElement, consumerError, onComplete); t2._2().run(); } /** * Execute this Stream on a schedule * *
     * {@code
     *  //run at 8PM every night
     * Streams.schedule(Stream.generate(()->"next job:"+formatDate(new Date()))
     *            .map(this::processJob)
     *            ,"0 20 * * *",Executors.newScheduledThreadPool(1)));
     * }
     * 
* * Connect to the Scheduled Stream * *
     * {@code
     * Connectable dataStream = Streams.schedule(Stream.generate(()->"next job:"+formatDate(new Date()))
     *            							  .map(this::processJob)
     *            							  ,"0 20 * * *",Executors.newScheduledThreadPool(1)));
     *
     *
     * dataStream.connect().forEach(this::logToDB);
     * }
     * 
* * * @param stream the stream to schedule element processing on * @param cron Expression that determines when each job will run * @param ex ScheduledExecutorService * @return Connectable Connectable of emitted from scheduled Stream */ public static Connectable schedule(final Stream stream, final String cron, final ScheduledExecutorService ex) { return new NonPausableConnectable<>( stream).schedule(cron, ex); } /** * Execute this Stream on a schedule * *
     * {@code
     *  //run every 60 seconds after last job completes
     *  Streams.scheduleFixedDelay(Stream.generate(()->"next job:"+formatDate(new Date()))
     *            .map(this::processJob)
     *            ,60_000,Executors.newScheduledThreadPool(1)));
     * }
     * 
* * Connect to the Scheduled Stream * *
     * {@code
     * Connectable dataStream = Streams.scheduleFixedDelay(Stream.generate(()->"next job:"+formatDate(new Date()))
     *            							  .map(this::processJob)
     *            							  ,60_000,Executors.newScheduledThreadPool(1)));
     *
     *
     * dataStream.connect().forEach(this::logToDB);
     * }
     * 
* * * @param stream the stream to schedule element processing on * @param delay Between last element completes passing through the Stream until the next one starts * @param ex ScheduledExecutorService * @return Connectable Connectable of emitted from scheduled Stream */ public static Connectable scheduleFixedDelay(final Stream stream, final long delay, final ScheduledExecutorService ex) { return new NonPausableConnectable<>( stream).scheduleFixedDelay(delay, ex); } /** * Execute this Stream on a schedule * *
     * {@code
     *  //run every 60 seconds
     *  Streams.scheduleFixedRate(Stream.generate(()->"next job:"+formatDate(new Date()))
     *            .map(this::processJob),
     *            60_000,Executors.newScheduledThreadPool(1)));
     * }
     * 
* * Connect to the Scheduled Stream * *
     * {@code
     * Connectable dataStream = Streams.scheduleFixedRate(Stream.generate(()->"next job:"+formatDate(new Date()))
     *            							  .map(this::processJob)
     *            							  ,60_000,Executors.newScheduledThreadPool(1)));
     *
     *
     * dataStream.connect().forEach(this::logToDB);
     * }
     * 
* @param stream the stream to schedule element processing on * @param rate Time in millis between job runs * @param ex ScheduledExecutorService * @return Connectable Connectable of emitted from scheduled Stream */ public static Connectable scheduleFixedRate(final Stream stream, final long rate, final ScheduledExecutorService ex) { return new NonPausableConnectable<>( stream).scheduleFixedRate(rate, ex); } /** * Split at supplied location *
     * {@code
     * ReactiveSeq.of(1,2,3).splitAt(1)
     *
     *  //Stream[1], Stream[2,3]
     * }
     *
     * 
*/ public final static Tuple2, Stream> splitAt(final Stream stream, final int where) { final Tuple2, Stream> Tuple2 = duplicate(stream); return Tuple.tuple( Tuple2._1().limit(where), Tuple2._2().skip(where)); } /** * Split stream at point where predicate no longer holds *
     * {@code
     *   ReactiveSeq.of(1, 2, 3, 4, 5, 6).splitBy(i->i<4)
     *
     *   //Stream[1,2,3] Stream[4,5,6]
     * }
     * 
*/ public final static Tuple2, Stream> splitBy(final Stream stream, final Predicate splitter) { final Tuple2, Stream> Tuple2 = duplicate(stream); return Tuple.tuple( takeWhile(Tuple2._1(), splitter), dropWhile(Tuple2._2(), splitter)); } /** * Partition a Stream into two one a per element basis, based on predicate's boolean value *
     * {@code
     *  ReactiveSeq.of(1, 2, 3, 4, 5, 6).partition(i -> i % 2 != 0)
     *
     *  //Stream[1,3,5], Stream[2,4,6]
     * }
     *
     * 
*/ public final static Tuple2, Stream> partition(final Stream stream, final Predicate splitter) { final Tuple2, Stream> Tuple2 = duplicate(stream); return Tuple.tuple( Tuple2._1().filter(splitter), Tuple2._2().filter(splitter.negate())); } /** * Duplicate a Stream, buffers intermediate values, leaders may change positions so a limit * can be safely applied to the leading stream. Not thread-safe. *
     * {@code
     *  Tuple2<ReactiveSeq<Integer>, ReactiveSeq<Integer>> copies = of(1,2,3,4,5,6).duplicate();
    	 assertTrue(copies._1.anyMatch(i->i==2));
    	 assertTrue(copies._2.anyMatch(i->i==2));
     *
     * }
     * 
* * @return duplicated stream */ public final static Tuple2, Stream> duplicate(final Stream stream) { final Tuple2, Iterator> Tuple2 = Streams.toBufferingDuplicator(stream.iterator()); return Tuple.tuple( Streams.stream(Tuple2._1()), Streams.stream(Tuple2._2())); } /** * Duplicate a Stream, buffers intermediate values, leaders may change positions so a limit * can be safely applied to the leading stream. Not thread-safe. *
     * {@code
     *  Tuple2<ReactiveSeq<Integer>, ReactiveSeq<Integer>> copies = of(1,2,3,4,5,6).duplicate();
    assertTrue(copies._1.anyMatch(i->i==2));
    assertTrue(copies._2.anyMatch(i->i==2));
     *
     * }
     * 
* * @return duplicated stream */ public final static Tuple2, Stream> duplicate(final Stream stream,Supplier> bufferFactory) { final Tuple2, Iterator> Tuple2 = Streams.toBufferingDuplicator(stream.iterator(),bufferFactory); return Tuple.tuple( Streams.stream(Tuple2._1()), Streams.stream(Tuple2._2())); } private final static Tuple2, Stream> duplicatePos(final Stream stream, final int pos) { final Tuple2, Iterator> Tuple2 = Streams.toBufferingDuplicator(stream.iterator(), pos); return Tuple.tuple( Streams.stream(Tuple2._1()), Streams.stream(Tuple2._2())); } /** * Triplicates a Stream * Buffers intermediate values, leaders may change positions so a limit * can be safely applied to the leading stream. Not thread-safe. *
     * {@code
     * 	Tuple3>,ReactiveSeq>,ReactiveSeq>> Tuple3 = sequence.triplicate();

     * }
     * 
*/ @SuppressWarnings("unchecked") public final static Tuple3, Stream, Stream> triplicate(final Stream stream) { final Stream> its = Streams.toBufferingCopier(stream.iterator(), 3) .stream() .map(it -> Streams.stream(it)); final Iterator> it = its.iterator(); return new Tuple3( it.next(), it.next(), it.next()); } /** * Triplicates a Stream * Buffers intermediate values, leaders may change positions so a limit * can be safely applied to the leading stream. Not thread-safe. *
     * {@code
     * 	Tuple3>,ReactiveSeq>,ReactiveSeq>> Tuple3 = sequence.triplicate();

     * }
     * 
*/ @SuppressWarnings("unchecked") public final static Tuple3, Stream, Stream> triplicate(final Stream stream, Supplier> bufferFactory) { final Stream> its = Streams.toBufferingCopier(stream.iterator(), 3,bufferFactory) .stream() .map(it -> Streams.stream(it)); final Iterator> it = its.iterator(); return new Tuple3( it.next(), it.next(), it.next()); } /** * Makes four copies of a Stream * Buffers intermediate values, leaders may change positions so a limit * can be safely applied to the leading stream. Not thread-safe. * *
     * {@code
     *
     * 		Tuple4>,ReactiveSeq>,ReactiveSeq>,ReactiveSeq>> quad = sequence.quadruplicate();

     * }
     * 
* @return */ @SuppressWarnings("unchecked") public final static Tuple4, Stream, Stream, Stream> quadruplicate(final Stream stream) { final Stream> its = Streams.toBufferingCopier(stream.iterator(), 4) .stream() .map(it -> Streams.stream(it)); final Iterator> it = its.iterator(); return new Tuple4( it.next(), it.next(), it.next(), it.next()); } /** * Makes four copies of a Stream * Buffers intermediate values, leaders may change positions so a limit * can be safely applied to the leading stream. Not thread-safe. * *
     * {@code
     *
     * 		Tuple4>,ReactiveSeq>,ReactiveSeq>,ReactiveSeq>> quad = sequence.quadruplicate();

     * }
     * 
* @return */ @SuppressWarnings("unchecked") public final static Tuple4, Stream, Stream, Stream> quadruplicate(final Stream stream, Supplier> bufferFactory) { final Stream> its = Streams.toBufferingCopier(stream.iterator(), 4,bufferFactory) .stream() .map(it -> Streams.stream(it)); final Iterator> it = its.iterator(); return new Tuple4( it.next(), it.next(), it.next(), it.next()); } /** * Append Stream to this Stream * *
     * {@code
     * List<String> result = of(1,2,3).appendStream(of(100,200,300))
    									.map(it ->it+"!!")
    									.collect(CyclopsCollectors.toList());

    		assertThat(result,equalTo(Arrays.asList("1!!","2!!","3!!","100!!","200!!","300!!")));
     * }
     * 
* * @param stream1 to append to * @param append to append with * @return Stream with Stream appended */ public static final Stream appendStream(final Stream stream1, final Stream append) { return Stream.concat(stream1, append); } /** * Prepend Stream to this Stream * *
     * {@code
     * List<String> result = of(1,2,3).prependStream(of(100,200,300))
    			.map(it ->it+"!!").collect(CyclopsCollectors.toList());

    		assertThat(result,equalTo(Arrays.asList("100!!","200!!","300!!","1!!","2!!","3!!")));
     *
     * }
     * 
* * @param stream1 to Prepend to * @param prepend to Prepend with * @return Stream with Stream prepended */ public static final Stream prependStream(final Stream stream1, final Stream prepend) { return Stream.concat(prepend, stream1); } /** * Append values to the take of this Stream *
     * {@code
     * List<String> result = of(1,2,3).append(100,200,300)
    									.map(it ->it+"!!")
    									.collect(CyclopsCollectors.toList());

    		assertThat(result,equalTo(Arrays.asList("1!!","2!!","3!!","100!!","200!!","300!!")));
     * }
     * 
* @param values to append * @return Stream with appended values */ public static final Stream append(final Stream stream, final T... values) { return appendStream(stream, Stream.of(values)); } /** * Prepend given values to the skip of the Stream *
     * {@code
     * List<String> result = of(1,2,3).prependAll(100,200,300)
    			.map(it ->it+"!!").collect(CyclopsCollectors.toList());

    		assertThat(result,equalTo(Arrays.asList("100!!","200!!","300!!","1!!","2!!","3!!")));
     * }
     * @param values to prepend
     * @return Stream with values prepended
     */
    public static final <T> Stream<T> prepend(final Stream<T> stream, final T... values) {
        return appendStream(Stream.of(values), stream);
    }
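
    // Illustrative sketch (not part of the original source) of prepend on a plain
    // java.util.stream.Stream:
    //
    //   List<Integer> out = Streams.prepend(Stream.of(3, 4), 1, 2)
    //                              .collect(Collectors.toList());   // [1, 2, 3, 4]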

    /**
     * Insert data into a stream at given position
     * 
     * {@code
     * List<String> result = of(1,2,3).insertAt(1,100,200,300)
    			.map(it ->it+"!!").collect(CyclopsCollectors.toList());

    		assertThat(result,equalTo(Arrays.asList("1!!","100!!","200!!","300!!","2!!","3!!")));
     *
     * }
     * 
* @param pos to insert data at * @param values to insert * @return Stream with new data inserted */ public static final Stream insertAt(final Stream stream, final int pos, final T... values) { final Tuple2, Stream> Tuple2 = duplicatePos(stream, pos); return appendStream(append(Tuple2._1().limit(pos), values), Tuple2._2().skip(pos)); } /** * Delete elements between given indexes in a Stream *
     * {@code
     * List<String> result = Streams.deleteBetween(Stream.of(1,2,3,4,5,6),2,4)
    										.map(it ->it+"!!")
    										.collect(CyclopsCollectors.toList());

    		assertThat(result,equalTo(Arrays.asList("1!!","2!!","5!!","6!!")));
     * }
     * 
* @param start index * @param end index * @return Stream with elements removed */ public static final Stream deleteBetween(final Stream stream, final int start, final int end) { final Tuple2, Stream> Tuple2 = duplicatePos(stream, start); return appendStream(Tuple2._1().limit(start), Tuple2._2().skip(end)); } /** * Insert a Stream into the middle of this stream at the specified position *
     * {@code
     * List<String> result = Streams.insertStreamAt(Stream.of(1,2,3),1,of(100,200,300))
    										.map(it ->it+"!!")
    										.collect(CyclopsCollectors.toList());

    		assertThat(result,equalTo(Arrays.asList("1!!","100!!","200!!","300!!","2!!","3!!")));
     * }
     * 
* @param stream1 to insert in * @param pos to insert Stream at * @param insert to insert * @return newly conjoined Stream */ public static final Stream insertStreamAt(final Stream stream1, final int pos, final Stream insert) { final Tuple2, Stream> Tuple2 = duplicatePos(stream1, pos); return appendStream(appendStream(Tuple2._1().limit(pos), insert), Tuple2._2().skip(pos)); } /** * skip elements in Stream until Predicate holds true *
     * {@code  Streams.dropUntil(Stream.of(4,3,6,7),i->i==6).collect(CyclopsCollectors.toList())
     *  // [6,7]
     *  }
* @param stream Stream to skip elements from * @param predicate to applyHKT * @return Stream with elements skipped */ public static Stream dropUntil(final Stream stream, final Predicate predicate) { return dropWhile(stream, predicate.negate()); } public static Stream dropRight(final Stream stream, final int num) { return StreamSupport.stream(SkipLastSpliterator.dropRight(stream.spliterator(),num),stream.isParallel()); } public static Stream takeRight(final Stream stream, final int num) { return StreamSupport.stream(LimitLastSpliterator.takeRight(stream.spliterator(), num),stream.isParallel()); } public static Stream recover(final Stream stream, final Function fn) { return new RecoverOperator<>( stream, Throwable.class).recover(fn); } public static Stream recover(final Stream stream, final Class type, final Function fn) { return new RecoverOperator( stream, (Class) type).recover((Function) fn); } /** * skip elements in a Stream while Predicate holds true * *
     *
     * {@code  Streams.dropWhile(Stream.of(4,3,6,7).sorted(),i->i<6).collect(CyclopsCollectors.toList())
     *  // [6,7]
     *  }
* @param stream * @param predicate * @return */ public static Stream dropWhile(final Stream stream, final Predicate predicate) { return StreamSupport.stream(new SkipWhileSpliterator(stream.spliterator(),predicate), stream.isParallel()); } public static Stream take(final Stream stream, final long time, final TimeUnit unit) { return StreamSupport.stream( new LimitWhileTimeSpliterator(stream.spliterator(),time,unit),stream.isParallel()); } public static Stream drop(final Stream stream, final long time, final TimeUnit unit) { return StreamSupport.stream(new SkipWhileTimeSpliterator(stream.spliterator(),time,unit),stream.isParallel()); } public static Stream combine(final Stream stream, final BiPredicate predicate, final BinaryOperator op) { final Iterator it = stream.iterator(); final Object UNSET = new Object(); return Streams.stream(new Iterator>() { T current = (T) UNSET; @Override public boolean hasNext() { return it.hasNext() || current != UNSET; } @Override public ReactiveSeq next() { while (it.hasNext()) { final T next = it.next(); if (current == UNSET) { current = next; } else if (predicate.test(current, next)) { current = op.apply(current, next); } else { final T result = current; current = (T) UNSET; return ReactiveSeq.of(result, next); } } if (it.hasNext()) return ReactiveSeq.empty(); final T result = current; current = (T) UNSET; return ReactiveSeq.of(result); } }) .flatMap(Function.identity()); } public static Iterable> combineI(final Iterable stream, final BiPredicate predicate, final BinaryOperator op) { final Object UNSET = new Object(); return ()-> new Iterator>() { T current = (T) UNSET; final Iterator it = stream.iterator(); @Override public boolean hasNext() { return it.hasNext() || current != UNSET; } @Override public ReactiveSeq next() { while (it.hasNext()) { final T next = it.next(); if (current == UNSET) { current = next; } else if (predicate.test(current, next)) { current = op.apply(current, next); } else { final T result = current; current = (T) UNSET; return ReactiveSeq.of(result, next); } } if (it.hasNext()) return ReactiveSeq.empty(); final T result = current; current = (T) UNSET; return ReactiveSeq.of(result); } }; } /** * Take elements from a stream while the predicates hold *
     * {@code Streams.takeWhile(Stream.of(4,3,6,7).sorted(),i->i<6).collect(CyclopsCollectors.toList());
     * //[3,4]
     * }
     * 
* @param stream * @param predicate * @return */ public static Stream takeWhile(final Stream stream, final Predicate predicate) { return StreamSupport.stream(new LimitWhileSpliterator<>(stream.spliterator(), predicate),stream.isParallel()); } /** * Take elements from a Stream until the predicate holds *
     * {@code Streams.takeUntil(Stream.of(4,3,6,7),i->i==6).collect(CyclopsCollectors.toList());
     * //[4,3]
     * }
     * 
* @param stream * @param predicate * @return */ public static Stream takeUntil(final Stream stream, final Predicate predicate) { return takeWhile(stream, predicate.negate()); } /** * Reverse a Stream * *
     * {@code
     * assertThat(Streams.reverse(Stream.of(1,2,3)).collect(CyclopsCollectors.toList())
    			,equalTo(Arrays.asList(3,2,1)));
     * }
     * 
* * @param stream Stream to reverse * @return Reversed stream */ public static Stream reverse(final Stream stream) { return ReactiveSeq.of(1).flatMap(i->reversedStream(stream.collect(java.util.stream.Collectors.toList()))); } /** * Create a reversed Stream from a List *
     * {@code
     * Streams.reversedStream(asList(1,2,3))
    			.map(i->i*100)
    			.forEach(System.out::println);


    	assertThat(Streams.reversedStream(Arrays.asList(1,2,3)).collect(CyclopsCollectors.toList())
    			,equalTo(Arrays.asList(3,2,1)));
     *
     * }
     * 
* * @param list List to create a reversed Stream from * @return Reversed Stream */ public static Stream reversedStream(final List list) { return new ReversedIterator<>( list).stream(); } /** * Create a new Stream that infiniteable cycles the provided Stream * *
     * {@code
     * assertThat(Streams.cycle(Stream.of(1,2,3))
     * 						.limit(6)
     * 						.collect(CyclopsCollectors.toList()),
     * 								equalTo(Arrays.asList(1,2,3,1,2,3)));
    	}
     * 
* @param s Stream to cycle * @return New cycling stream */ public static Stream cycle(final Stream s) { return cycle(Streamable.fromStream(s)); } /** * Create a Stream that infiniteable cycles the provided Streamable * @param s Streamable to cycle * @return New cycling stream */ public static Stream cycle(final Streamable s) { return Stream.iterate(s.stream(), s1 -> s.stream()) .flatMap(Function.identity()); } /** * Create a Stream that finitely cycles the provided Streamable, provided number of times * *
     * {@code
     * assertThat(Streams.cycle(3,Streamable.of(1,2,2))
    							.collect(CyclopsCollectors.toList()),
    								equalTo(Arrays.asList(1,2,2,1,2,2,1,2,2)));
     * }
     * 
* @param s Streamable to cycle * @return New cycling stream */ public static Stream cycle(final long times, final Streamable s) { return Stream.iterate(s.stream(), s1 -> s.stream()) .limit(times) .flatMap(Function.identity()); } /** * Create a stream from an iterable *
     * {@code
     * 	assertThat(Streams.stream(Arrays.asList(1,2,3))
     * 								.collect(CyclopsCollectors.toList()),
     * 									equalTo(Arrays.asList(1,2,3)));

     *
     * }
     * 
* @param it Iterable to convert to a Stream * @return Stream from iterable */ public static Stream stream(final Iterable it) { return StreamSupport.stream(it.spliterator(), false); } public static Stream stream(final Spliterator it) { return StreamSupport.stream(it, false); } /** * Create a stream from an iterator *
     * {@code
     * 	assertThat(Streams.stream(Arrays.asList(1,2,3).iterator())
     * 							.collect(CyclopsCollectors.toList()),
     * 								equalTo(Arrays.asList(1,2,3)));

     * }
     * 
* @param it Iterator to convert to a Stream * @return Stream from iterator */ public static Stream stream(final Iterator it) { return StreamSupport.stream(Spliterators.spliteratorUnknownSize(it, Spliterator.ORDERED), false); } /** * Concat an Object and a Stream * If the Object is a Stream, Streamable or Iterable will be converted (or left) in Stream form and concatonated * Otherwise a new Stream.of(o) is created * * @param o Object to concat * @param stream Stream to concat * @return Concatonated Stream */ @Deprecated public static Stream concat(final Object o, final Stream stream) { Stream first = null; if (o instanceof Stream) { first = (Stream) o; } else if (o instanceof Iterable) { first = stream((Iterable) o); } else if (o instanceof Streamable) { first = ((Streamable) o).stream(); } else { first = Stream.of((U) o); } return Stream.concat(first, stream); } /** * Create a stream from a transform *
     * {@code
     * 	Map<String,String> transform = new HashMap<>();
    	transform.put("hello","world");
    	assertThat(Streams.stream(transform).collect(CyclopsCollectors.toList()),equalTo(Arrays.asList(new AbstractMap.SimpleEntry("hello","world"))));

     * }
* * * @param it Iterator to convert to a Stream * @return Stream from a transform */ public final static Stream> stream(final Map it) { return it.entrySet() .stream(); } public final static T firstValue(final Stream stream) { return stream.findAny() .get(); } /** * Simultaneously reduce a stream with multiple reducers * *
{@code
     *
     *  Monoid<Integer> sum = Monoid.of(0,(a,b)->a+b);
    	Monoid<Integer> mult = Monoid.of(1,(a,b)->a*b);
    	val result = Streams.reduce(Stream.of(1,2,3,4),Arrays.asList(sum,mult));


    	assertThat(result,equalTo(Arrays.asList(10,24)));
    	}
* * @param stream Stream to reduce * @param reducers Reducers to reduce Stream * @return Reduced Stream values as List entries */ @SuppressWarnings({ "rawtypes", "unchecked" }) public static Seq reduce(final Stream stream, final Iterable> reducers) { return Seq.fromIterable(new MultiReduceOperator( stream).reduce(reducers)); } /** * Simultanously reduce a stream with multiple reducers * *
     * {@code
     *  Monoid<String> concat = Monoid.of("",(a,b)->a+b);
    	Monoid<String> join = Monoid.of("",(a,b)->a+","+b);
    	assertThat(Streams.reduce(Stream.of("hello", "world", "woo!"),Stream.of(concat,join))
    	                 ,equalTo(Arrays.asList("helloworldwoo!",",hello,world,woo!")));
     * }
     * 
* * @param stream Stream to reduce * @param reducers Reducers to reduce Stream * @return Reduced Stream values as List entries */ @SuppressWarnings({ "rawtypes", "unchecked" }) public static Seq reduce(final Stream stream, final Stream> reducers) { return reduce(stream, Seq.fromIterable((List) reducers.collect(java.util.stream.Collectors.toList()))); } /** * Repeat in a Stream while specified predicate holds *
     * {@code
     *  int count =0;
     *
    	assertThat(Streams.cycleWhile(Stream.of(1,2,2)
    										,next -> count++<6 )
    										.collect(CyclopsCollectors.toList()),equalTo(Arrays.asList(1,2,2,1,2,2)));
     * }
     * 
* @param predicate * repeat while true * @return Repeating Stream */ public final static Stream cycleWhile(final Stream stream, final Predicate predicate) { return Streams.takeWhile(Streams.cycle(stream), predicate); } /** * Repeat in a Stream until specified predicate holds * *
     * {@code
     * 	count =0;
    	assertThat(Streams.cycleUntil(Stream.of(1,2,2,3)
    										,next -> count++>10 )
    										.collect(CyclopsCollectors.toList()),equalTo(Arrays.asList(1, 2, 2, 3, 1, 2, 2, 3, 1, 2, 2)));

     * }
     * 
* @param predicate * repeat while true * @return Repeating Stream */ public final static Stream cycleUntil(final Stream stream, final Predicate predicate) { return Streams.takeUntil(Streams.cycle(stream), predicate); } /** * Generic zip function. E.g. Zipping a Stream and a Sequence * *
     * {@code
     * Stream<List<Integer>> zipped = Streams.zipSequence(Stream.of(1,2,3)
    											,ReactiveSeq.of(2,3,4),
    												(a,b) -> Arrays.asList(a,b));


    	List<Integer> zip = zipped.collect(CyclopsCollectors.toList()).get(1);
    	assertThat(zip.get(0),equalTo(2));
    	assertThat(zip.get(1),equalTo(3));
     * }
     * 
* @param second * Monad to zip with * @param zipper * Zipping function * @return Stream zipping two Monads */ public final static Stream zipSequence(final Stream stream, final Stream second, final BiFunction zipper) { final Iterator left = stream.iterator(); final Iterator right = second.iterator(); return Streams.stream(new Iterator() { @Override public boolean hasNext() { return left.hasNext() && right.hasNext(); } @Override public R next() { return zipper.apply(left.next(), right.next()); } }); } /** * Zip this Monad with a Stream *
       {@code
       Stream<List<Integer>> zipped = Streams.zipStream(Stream.of(1,2,3)
    											,Stream.of(2,3,4),
    												(a,b) -> Arrays.asList(a,b));


    	List<Integer> zip = zipped.collect(CyclopsCollectors.toList()).get(1);
    	assertThat(zip.get(0),equalTo(2));
    	assertThat(zip.get(1),equalTo(3));
       }
       
* * @param second * Stream to zip with * @param zipper * Zip funciton * @return This monad zipped with a Stream */ public final static Stream zipStream(final Stream stream, final BaseStream> second, final BiFunction zipper) { final Iterator left = stream.iterator(); final Iterator right = second.iterator(); return Streams.stream(new Iterator() { @Override public boolean hasNext() { return left.hasNext() && right.hasNext(); } @Override public R next() { return zipper.apply(left.next(), right.next()); } }); } /** * Create a sliding view over this Stream *
     * {@code
     * List> list = Streams.sliding(Stream.of(1,2,3,4,5,6)
    											,2,1)
    								.collect(CyclopsCollectors.toList());


    	assertThat(list.getValue(0),hasItems(1,2));
    	assertThat(list.getValue(1),hasItems(2,3));
     * }
     * 
* @param windowSize * Size of sliding window * @return Stream with sliding view */ public final static Stream> sliding(final Stream stream, final int windowSize, final int increment) { return StreamSupport.stream(new SlidingSpliterator<>(stream.spliterator(),Function.identity(), windowSize,increment),stream.isParallel()); } /** * Create a sliding view over this Stream *
     * {@code
     * List> list = Streams.sliding(Stream.of(1,2,3,4,5,6)
    											,2,1)
    								.collect(CyclopsCollectors.toList());


    	assertThat(list.getValue(0),hasItems(1,2));
    	assertThat(list.getValue(1),hasItems(2,3));
     * }
     * 
* @param windowSize * Size of sliding window * @return Stream with sliding view over monad */ public final static Stream> window(final Stream stream, final int windowSize, final int increment) { final Iterator it = stream.iterator(); final Mutable> list = Mutable.of(Seq.empty()); return Streams.stream(new Iterator>() { @Override public boolean hasNext() { return it.hasNext(); } @Override public Streamable next() { for (int i = 0; i < increment && list.get() .size() > 0; i++) list.mutate(var -> var.removeAt(0)); for (; list.get() .size() < windowSize && it.hasNext();) { if (it.hasNext()) { list.mutate(var -> var.insertAt(Math.max(0, var.size()), it.next())); } } return Streamable.fromIterable(list.get()); } }); } /** * Create a sliding view over this Stream *
     * {@code
     * List> list = Streams.sliding(Stream.of(1,2,3,4,5,6)
    											,2)
    								.collect(CyclopsCollectors.toList());


    	assertThat(list.getValue(0),hasItems(1,2));
    	assertThat(list.getValue(1),hasItems(2,3));
     * }
     * 
* * @param stream Stream to create sliding view on * @param windowSize size of window * @return */ public final static Stream> sliding(final Stream stream, final int windowSize) { return sliding(stream, windowSize, 1); } /** * Group elements in a Stream by size *
       {@code
     *  List> list = Streams.grouped(Stream.of(1,2,3,4,5,6)
                                                        ,3)
                                                    .collect(CyclopsCollectors.toList());


        assertThat(list.getValue(0),hasItems(1,2,3));
        assertThat(list.getValue(1),hasItems(4,5,6));
        }
     * 
* @param stream Stream to group * @param groupSize * Size of each Group * @return Stream with elements grouped by size */ public final static Stream> grouped(final Stream stream, final int groupSize) { return StreamSupport.stream(new GroupingSpliterator<>(stream.spliterator(),()->Vector.empty(), c->Vector.fromIterable(c),groupSize),stream.isParallel()); } /** * *
       {@code
     *  List> list = Streams.grouped(Stream.of(1,2,3,4,5,6)
                                                        ,3,()->SetX.zero())
                                                    .collect(CyclopsCollectors.toList());


        assertThat(list.getValue(0),hasItems(1,2,3));
        assertThat(list.getValue(1),hasItems(4,5,6));
        }
     * 
* * @param stream Stream to group * @param groupSize Size of each Group * @param factory Supplier for creating Collections for holding grouping * @return Stream with elements grouped by size */ public final static > Stream grouped(final Stream stream, final int groupSize, final Supplier factory) { return StreamSupport.stream(new GroupingSpliterator<>(stream.spliterator(),factory, Function.identity(),groupSize),stream.isParallel()); } public final static Streamable shuffle(final Stream stream) { final List list = stream.collect(java.util.stream.Collectors.toList()); Collections.shuffle(list); return Streamable.fromIterable(list); } public final static Streamable toLazyStreamable(final Stream stream) { return Streamable.fromStream(stream); } public final static Stream scanRight(final Stream stream, final U identity, final BiFunction combiner) { return ReactiveSeq.fromStream(stream) .scanRight(identity, combiner); } /** * Scan left using supplied Monoid * *
     * {@code
     *
     * 	assertEquals(asList("", "a", "ab", "abc"),
     * 					Streams.scanLeft(Stream.of("a", "b", "c"),Reducers.toString(""))
     * 			.collect(CyclopsCollectors.toList());
     *
     *         }
     * 
* * @param monoid * @return */ public final static Stream scanLeft(final Stream stream, final Monoid monoid) { final Iterator it = stream.iterator(); return Streams.stream(new Iterator() { boolean init = false; T next = monoid.zero(); @Override public boolean hasNext() { if (!init) return true; return it.hasNext(); } @Override public T next() { if (!init) { init = true; return monoid.zero(); } return next = monoid .apply(next, it.next()); } }); } /** * Check that there are specified number of matches of predicate in the Stream * *
     * {@code
     *  assertTrue(Streams.xMatch(Stream.of(1,2,3,5,6,7),3, i->i>4));
     * }
     * 
* */ public static boolean xMatch(final Stream stream, final int num, final Predicate c) { return stream.filter(t -> c.test(t)) .collect(java.util.stream.Collectors.counting()) == num; } /** *
     * {@code
     * assertThat(Streams.noneMatch(of(1,2,3,4,5),it-> it==5000),equalTo(true));
     * }
     * 
* */ public final static boolean noneMatch(final Stream stream, final Predicate c) { return stream.allMatch(c.negate()); } public final static String join(final Stream stream) { return stream.map(t -> t.toString()) .collect(java.util.stream.Collectors.joining()); } public final static String join(final Stream stream, final String sep) { return stream.map(t -> t.toString()) .collect(java.util.stream.Collectors.joining(sep)); } public final static String join(final Stream stream, final String sep, final String start, final String end) { return stream.map(t -> t.toString()) .collect(java.util.stream.Collectors.joining(sep, start, end)); } public final static > Optional minBy(final Stream stream, final Function f) { final Optional> o = stream.map(in -> new Tuple2( f.apply(in), in)) .min(Comparator.comparing(n -> n._1(), Comparator.naturalOrder())); return o.map(p -> p._2()); } public final static Optional min(final Stream stream, final Comparator comparator) { return stream.collect(java.util.stream.Collectors.minBy(comparator)); } public final static > Optional maxBy(final Stream stream, final Function f) { final Optional> o = stream.map(in -> new Tuple2( f.apply(in), in)) .max(Comparator.comparing(n -> n._1(), Comparator.naturalOrder())); return o.map(p -> p._2()); } public final static Optional max(final Stream stream, final Comparator comparator) { return stream.collect(java.util.stream.Collectors.maxBy(comparator)); } /** * Attempt to transform this Stream to the same type as the supplied Monoid (using mapToType on the monoid interface) * Then use Monoid to reduce values * * @param reducer Monoid to reduce values * @return Reduce result */ public final static R foldMap(final Stream stream, final Reducer reducer) { return reducer.foldMap(stream); } /** * Attempt to transform this Stream to the same type as the supplied Monoid, using supplied function * Then use Monoid to reduce values * * @param mapper Function to transform Monad type * @param reducer Monoid to reduce values * @return Reduce result */ public final static R foldMap(final Stream stream, final Function mapper, final Monoid reducer) { return reducer.foldLeft(stream.map(mapper)); } /** * * * @param reducer Use supplied Monoid to reduce values starting via foldLeft * @return Reduced result */ public final static T foldLeft(final Stream stream, final Monoid reducer) { return reducer.foldLeft(stream); } /** * Attempt to transform this Monad to the same type as the supplied Monoid (using mapToType on the monoid interface) * Then use Monoid to reduce values * * @param reducer Monoid to reduce values * @return Reduce result */ public final static R foldLeftMapToType(final Stream stream, final Reducer reducer) { return reducer.foldMap(stream); } /** * * * @param reducer Use supplied Monoid to reduce values starting via foldRight * @return Reduced result */ public final static T foldRight(final Stream stream, final Monoid reducer) { return reducer.foldLeft(Streams.reverse(stream)); } /** * Attempt to transform this Monad to the same type as the supplied Monoid (using mapToType on the monoid interface) * Then use Monoid to reduce values * * @param reducer Monoid to reduce values * @return Reduce result */ public final static R foldRightMapToType(final Stream stream, final Reducer reducer) { return reducer.foldMap(Streams.reverse(stream)); } /** * @return Underlying monad converted to a Streamable instance */ public final static Streamable toStreamable(final Stream stream) { return Streamable.fromStream(stream); } /** * @return This monad 
converted to a set */ public final static Set toSet(final Stream stream) { return stream.collect(java.util.stream.Collectors.toSet()); } /** * @return this monad converted to a list */ public final static List toList(final Stream stream) { return stream.collect(java.util.stream.Collectors.toList()); } /** * *
{@code
     * assertTrue(Streams.startsWith(Stream.of(1,2,3,4),Arrays.asList(1,2,3)));
     * }
* * @param iterable * @return True if Monad starts with Iterable sequence of data */ public final static boolean startsWith(final Stream stream, final Iterable iterable) { return startsWith(stream, iterable.iterator()); } private static Tuple2> findSize(Iterable iterable){ if(iterable instanceof Collection) { Collection col = (Collection) iterable; return Tuple.tuple(col.size(),col.iterator()); } int size=0; final Iterator it = iterable.iterator(); final List compare1 = new ArrayList<>(); while (it.hasNext()) { compare1.add(it.next()); size++; } return Tuple.tuple(size,compare1.iterator()); } public final static boolean endsWith(final Stream stream, final Iterable iterable) { Tuple2> sizeAndIterator = findSize(iterable); final Deque list = new ArrayDeque(sizeAndIterator._1()); stream.forEach(v -> { list.add(v); if (list.size() > sizeAndIterator._1()) list.remove(); }); return startsWith(list.stream(), sizeAndIterator._2()); } public final static boolean startsWith(final Stream stream, final Stream stream2) { return startsWith(stream, stream2.iterator()); } /** *
     * {@code
     * 		 assertTrue(Streams.startsWith(Stream.of(1,2,3,4),Arrays.asList(1,2,3).iterator()))
     * }
* @param iterator * @return True if Monad starts with Iterators sequence of data */ public final static boolean startsWith(final Stream stream, final Iterator iterator) { final Iterator it = stream.iterator(); while (iterator.hasNext()) { if (!it.hasNext()) return false; if (!Objects.equals(it.next(), iterator.next())) return false; } return true; } public static ReactiveSeq oneShotStream(final Iterable iterable) { Objects.requireNonNull(iterable); return new OneShotStreamX(new IteratableSpliterator(iterable), Optional.empty()); } public static ReactiveSeq oneShotStream(Stream stream){ return new OneShotStreamX(stream,Optional.empty()); } public static OneShotStreamX oneShotStream(Spliterator stream,final Optional rev){ return new OneShotStreamX(stream,rev); } public final static ReactiveSeq reactiveSeq(final Stream stream, final Optional rev) { if (stream instanceof ReactiveSeq) return (ReactiveSeq) stream; // return new StreamX((Stream) // stream, rev); return oneShotStream((Stream)stream); } public final static ReactiveSeq reactiveSeq(final Iterable iterable){ return ReactiveSeq.fromIterable(iterable); } public final static ReactiveSeq reactiveSeq(final Stream stream){ return ReactiveSeq.fromStream(stream); } public final static ReactiveSeq reactiveSeq(final Spliterator stream, final Optional rev) { return new StreamX((Spliterator) stream, rev); } /** * Returns a stream with a given value interspersed between any two values * of this stream. * *
    /**
     * Returns a stream with a given value interspersed between any two values
     * of this stream.
     *
     * <pre>
     * {@code
     * assertThat(Arrays.asList(1, 0, 2, 0, 3, 0, 4),
     *            equalTo(Streams.intersperse(Stream.of(1, 2, 3, 4), 0).collect(Collectors.toList())));
     * }
     * </pre>
     */
    public static <T> Stream<T> intersperse(final Stream<T> stream, final T value) {
        return stream.flatMap(t -> Stream.of(value, t))
                     .skip(1);
    }

    /**
     * Keep only those elements in a stream that are of a given type.
     *
     * <pre>
     * {@code
     * assertThat(Arrays.asList(1, 2, 3),
     *            equalTo(Streams.ofType(Stream.of(1, "a", 2, "b", 3), Integer.class).collect(Collectors.toList())));
     * }
     * </pre>
     */
    @SuppressWarnings("unchecked")
    public static <T, U> Stream<U> ofType(final Stream<T> stream, final Class<? extends U> type) {
        return stream.filter(type::isInstance)
                     .map(t -> (U) t);
    }
    /**
     * Cast all elements in a stream to a given type, possibly throwing a
     * {@link ClassCastException}.
     *
     * <pre>
     * {@code
     * Streams.cast(Stream.of(1, "a", 2, "b", 3), Integer.class)
     *  // throws ClassCastException
     * }
     * </pre>
     */
    public static <T, U> Stream<U> cast(final Stream<T> stream, final Class<? extends U> type) {
        return stream.map(type::cast);
    }



    public final static <T> Stream<T> narrow(Stream<? extends T> stream) {
        return (Stream<T>) stream;
    }


    /**
     * flatMap operation that allows a Collection to be returned
     *
     * <pre>
     * {@code
     * assertThat(Streams.concatMapterable(Stream.of(20), i -> Arrays.asList(1, 2, i))
     *                   .collect(CyclopsCollectors.toList()),
     *            equalTo(Arrays.asList(1, 2, 20)));
     * }
     * </pre>
     */
    public final static <T, R> Stream<R> concatMapterable(final Stream<T> stream, final Function<? super T, ? extends Iterable<? extends R>> fn) {
        return stream.flatMap(fn.andThen(c -> stream(c)));
    }
    /**
     * <pre>
     * {@code
     * assertThat(Streams.flatMapStream(Stream.of(1, 2, 3),
     *                                  i -> Stream.of(i)).collect(CyclopsCollectors.toList()),
     *            equalTo(Arrays.asList(1, 2, 3)));
     * }
     * </pre>
     */
    public final static <T, R> Stream<R> flatMapStream(final Stream<T> stream, final Function<? super T, ? extends BaseStream<? extends R, ?>> fn) {
        return stream.flatMap(fn.andThen(bs -> {
            if (bs instanceof Stream)
                return (Stream<R>) bs;
            else
                return Streams.stream(bs.iterator());
        }));
    }
    /**
     * cross type flatMap, removes null entries
     *
     * <pre>
     * {@code
     * assertThat(Streams.flatMapOptional(Stream.of(1, 2, 3, null),
     *                                    Optional::ofNullable)
     *                   .collect(CyclopsCollectors.toList()),
     *            equalTo(Arrays.asList(1, 2, 3)));
     * }
     * </pre>
     */
    public final static <T, R> Stream<R> flatMapOptional(final Stream<T> stream, final Function<? super T, Optional<? extends R>> fn) {
        return stream.flatMap(in -> Streams.optionalToStream(fn.apply(in)));
    }

    public final static <T> Stream<T> flatten(final Stream<Stream<T>> stream) {
        return stream.flatMap(Function.identity());
    }
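
    /*
     * Illustrative usage sketch (added for clarity; not part of the original class).
     * flatten collapses a Stream of Streams into a single Stream, while flatMapOptional
     * silently drops empty Optionals (and hence null-producing inputs).
     */
    private static void flattenExamples() {
        List<Integer> flat = Streams.flatten(Stream.of(Stream.of(1, 2), Stream.of(3)))
                                    .collect(Collectors.toList());                               // [1, 2, 3]
        List<Integer> noNulls = Streams.flatMapOptional(Stream.of(1, null, 3), Optional::ofNullable)
                                       .collect(Collectors.toList());                            // [1, 3]
        System.out.println(flat + " " + noNulls);
    }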
    /**
     * <pre>
     * {@code
     * assertThat(Streams.flatMapCompletableFuture(Stream.of(1, 2, 3),
     *                                             i -> CompletableFuture.completedFuture(i + 2))
     *                   .collect(CyclopsCollectors.toList()),
     *            equalTo(Arrays.asList(3, 4, 5)));
     * }
     * </pre>
     */
    public final static <T, R> Stream<R> flatMapCompletableFuture(final Stream<T> stream, final Function<? super T, CompletableFuture<? extends R>> fn) {
        return stream.flatMap(in -> Streams.completableFutureToStream(fn.apply(in)));
    }
    /**
     * Perform a flatMap operation where the result will be a flattened stream of Characters
     * from the CharSequence returned by the supplied function.
     *
     * <pre>
     * {@code
     * List<Character> result = Streams.flatMapCharSequence(Stream.of("input.file"),
     *                                                      i -> "hello world")
     *                                 .toList();
     *
     * assertThat(result, equalTo(Arrays.asList('h','e','l','l','o',' ','w','o','r','l','d')));
     * }
     * </pre>
     *
     * @param fn
     * @return
     */
    //rename - flatMapCharSequence
    public final static <T> Stream<Character> flatMapCharSequence(final Stream<T> stream, final Function<? super T, CharSequence> fn) {
        return stream.flatMap(fn.andThen(CharSequence::chars)
                                .andThen(s -> s.mapToObj(i -> Character.toChars(i)[0])));
    }
    /**
     * Perform a flatMap operation where the result will be a flattened stream of Strings
     * from the text loaded from the supplied files.
     *
     * <pre>
     * {@code
     * List<String> result = Streams.flatMapFile(Stream.of("input.file")
     *                                                 .map(getClass().getClassLoader()::getResource)
     *                                                 .peek(System.out::println)
     *                                                 .map(URL::getFile),
     *                                           File::new)
     *                              .toList();
     *
     * assertThat(result, equalTo(Arrays.asList("hello", "world")));
     * }
     * </pre>
     *
     * @param fn
     * @return
     */
    public final static <T> Stream<String> flatMapFile(final Stream<T> stream, final Function<? super T, File> fn) {
        return stream.flatMap(fn.andThen(f -> ExceptionSoftener.softenSupplier(() -> Files.lines(Paths.get(f.getAbsolutePath())))
                                                               .get()));
    }
    /**
     * Perform a flatMap operation where the result will be a flattened stream of Strings
     * from the text loaded from the supplied URLs.
     *
     * <pre>
     * {@code
     * List<String> result = Streams.flatMapURL(Stream.of("input.file"),
     *                                          getClass().getClassLoader()::getResource)
     *                              .collect(CyclopsCollectors.toList());
     *
     * assertThat(result, equalTo(Arrays.asList("hello", "world")));
     * }
     * </pre>
     *
     * @param fn
     * @return
     */
    public final static <T> Stream<String> flatMapURL(final Stream<T> stream, final Function<? super T, URL> fn) {
        return stream.flatMap(fn.andThen(url -> ExceptionSoftener.softenSupplier(() -> {
            final BufferedReader in = new BufferedReader(
                                                         new InputStreamReader(
                                                                               url.openStream()));
            return in.lines();
        }).get()));
    }
    /**
     * Perform a flatMap operation where the result will be a flattened stream of Strings
     * from the text loaded from the supplied BufferedReaders.
     *
     * <pre>
     * {@code
     * List<String> result = Streams.flatMapBufferedReader(Stream.of("input.file")
     *                                                           .map(getClass().getClassLoader()::getResourceAsStream)
     *                                                           .map(InputStreamReader::new),
     *                                                     BufferedReader::new)
     *                              .collect(CyclopsCollectors.toList());
     *
     * assertThat(result, equalTo(Arrays.asList("hello", "world")));
     * }
     * </pre>
     *
     * @param fn
     * @return
     */
    public final static <T> Stream<String> flatMapBufferedReader(final Stream<T> stream, final Function<? super T, BufferedReader> fn) {
        return stream.flatMap(fn.andThen(in -> ExceptionSoftener.softenSupplier(() -> {
            return in.lines();
        }).get()));
    }

    public static final <T> Tuple2<Iterable<T>, Iterable<T>> toBufferingDuplicator(final Iterable<T> it, Supplier<Deque<T>> bufferFactory) {
        return Tuple.tuple(() -> toBufferingDuplicator(it.iterator(), Long.MAX_VALUE, bufferFactory)._1(),
                           () -> toBufferingDuplicator(it.iterator(), Long.MAX_VALUE, bufferFactory)._2());
    }

    public static final <T> Tuple2<Iterable<T>, Iterable<T>> toBufferingDuplicator(final Iterable<T> it) {
        return Tuple.tuple(() -> toBufferingDuplicator(it.iterator(), Long.MAX_VALUE)._1(),
                           () -> toBufferingDuplicator(it.iterator(), Long.MAX_VALUE)._2());
    }

    public static final <T> Tuple2<Iterator<T>, Iterator<T>> toBufferingDuplicator(final Iterator<T> iterator) {
        return toBufferingDuplicator(iterator, Long.MAX_VALUE);
    }

    public static final <T> Tuple2<Iterator<T>, Iterator<T>> toBufferingDuplicator(final Iterator<T> iterator, final long pos) {
        final LinkedList<T> bufferTo = new LinkedList<T>();
        final LinkedList<T> bufferFrom = new LinkedList<T>();
        return Tuple.tuple(new DuplicatingIterator(
                                                   bufferTo, bufferFrom, iterator, Long.MAX_VALUE, 0),
                           new DuplicatingIterator(
                                                   bufferFrom, bufferTo, iterator, pos, 0));
    }

    public static final <T> Tuple2<Iterator<T>, Iterator<T>> toBufferingDuplicator(final Iterator<T> iterator, Supplier<Deque<T>> bufferFactory) {
        return toBufferingDuplicator(iterator, Long.MAX_VALUE, bufferFactory);
    }

    public static final <T> Tuple2<Iterator<T>, Iterator<T>> toBufferingDuplicator(final Iterator<T> iterator, final long pos, Supplier<Deque<T>> bufferFactory) {
        final Deque<T> bufferTo = bufferFactory.get();
        final Deque<T> bufferFrom = bufferFactory.get();
        return Tuple.tuple(new DuplicatingIterator(
                                                   bufferTo, bufferFrom, iterator, Long.MAX_VALUE, 0),
                           new DuplicatingIterator(
                                                   bufferFrom, bufferTo, iterator, pos, 0));
    }

    public static final <T> Seq<Iterable<T>> toBufferingCopier(final Iterable<T> it, final int copies) {
        return Seq.range(0, copies)
                  .zipWithIndex()
                  .map(t -> () -> toBufferingCopier(it.iterator(), copies).getOrElseGet(t._2().intValue(), () -> Arrays.<T> asList().iterator()));
    }

    public static final <T> Seq<Iterable<T>> toBufferingCopier(final Iterable<T> it, final int copies, Supplier<Deque<T>> bufferSupplier) {
        return Seq.range(0, copies)
                  .zipWithIndex()
                  .map(t -> () -> toBufferingCopier(it.iterator(), copies, bufferSupplier).getOrElseGet(t._2().intValue(), () -> Arrays.<T> asList().iterator()));
    }

    public static final <T> Seq<Iterator<T>> toBufferingCopier(final Iterator<T> iterator, final int copies) {
        final List<Iterator<T>> result = new ArrayList<>();
        ArrayList<Deque<T>> localBuffers = new ArrayList<>(copies);
        for (int i = 0; i < copies; i++) {
            Deque<T> buffer = new LinkedList<T>();
            localBuffers.add(buffer);
            result.add(new CopyingIterator(localBuffers, iterator, buffer));
        }
        return Seq.fromIterable(result);
    }

    public static final <T> Seq<Iterator<T>> toBufferingCopier(final Iterator<T> iterator, final int copies, Supplier<Deque<T>> bufferSupplier) {
        final List<Iterator<T>> result = new ArrayList<>();
        ArrayList<Deque<T>> localBuffers = new ArrayList<>(copies);
        for (int i = 0; i < copies; i++) {
            Deque<T> buffer = bufferSupplier.get();
            localBuffers.add(buffer);
            result.add(new CopyingIterator(localBuffers, iterator, buffer));
        }
        return Seq.fromIterable(result);
    }

    @AllArgsConstructor
    static class DuplicatingIterator<T> implements Iterator<T> {

        Deque<T> bufferTo;
        Deque<T> bufferFrom;
        Iterator<T> it;
        long otherLimit = Long.MAX_VALUE;
        long counter = 0;

        @Override
        public boolean hasNext() {
            if (bufferFrom.size() > 0 || it.hasNext())
                return true;
            return false;
        }

        @Override
        public T next() {
            try {
                if (bufferFrom.size() > 0)
                    return bufferFrom.poll();
                else {
                    final T next = it.next();
                    if (counter < otherLimit)
                        bufferTo.add(next);
                    return next;
                }
            } finally {
                counter++;
            }
        }
    }
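
    /*
     * Illustrative usage sketch (added for clarity; not part of the original class).
     * toBufferingCopier hands out several Iterators over a single source Iterator,
     * buffering elements so that each copy can be consumed independently.
     */
    private static void bufferingCopierExample() {
        Seq<Iterator<Integer>> copies = Streams.toBufferingCopier(Arrays.asList(1, 2, 3).iterator(), 2);
        for (Iterator<Integer> copy : copies) {
            while (copy.hasNext()) {
                System.out.println(copy.next());   // each copy prints 1, 2, 3
            }
        }
    }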
    static class CopyingIterator<T> implements Iterator<T> {

        ArrayList<Deque<T>> localBuffers;
        Deque<T> buffer;
        Iterator<T> it;

        @Override
        public boolean hasNext() {
            return buffer.size() > 0 || it.hasNext();
        }

        @Override
        public T next() {
            if (buffer.size() > 0)
                return buffer.poll();
            return handleLeader(); //exceeded buffer, now leading
        }

        private void offer(T value) {
            for (Deque<T> next : localBuffers) {
                if (next != buffer)
                    next.add(value);
            }
        }

        private T handleLeader() {
            final T next = it.next();
            offer(next);
            return next;
        }

        public CopyingIterator(ArrayList<Deque<T>> localBuffers, final Iterator<T> it, final Deque<T> buffer) {
            this.it = it;
            this.buffer = buffer;
            this.localBuffers = localBuffers;
        }
    }

    /**
     * Projects an immutable collection of this stream. Initial iteration over the collection is not thread safe
     * (it can't be performed by multiple threads concurrently) - subsequent iterations are.
     *
     * @return An immutable collection of this stream.
     */
    public static final <A> Collection<A> toLazyCollection(final Stream<A> stream) {
        return SeqUtils.toLazyCollection(stream.iterator());
    }

    public static final <A> Collection<A> toLazyCollection(final Iterator<A> iterator) {
        return SeqUtils.toLazyCollection(iterator);
    }

    /**
     * Lazily constructs a Collection from the specified Stream. The Collection's iterator may be safely used
     * concurrently by multiple threads.
     *
     * @param stream
     * @return
     */
    public static final <A> Collection<A> toConcurrentLazyCollection(final Stream<A> stream) {
        return SeqUtils.toConcurrentLazyCollection(stream.iterator());
    }

    public static final <A> Collection<A> toConcurrentLazyCollection(final Iterator<A> iterator) {
        return SeqUtils.toConcurrentLazyCollection(iterator);
    }

    public final static <T> Stream<Streamable<T>> windowByTime(final Stream<T> stream, final long time, final TimeUnit t) {
        final Iterator<T> it = stream.iterator();
        final long toRun = t.toNanos(time);
        return Streams.stream(new Iterator<Streamable<T>>() {
            long start = System.nanoTime();

            @Override
            public boolean hasNext() {
                return it.hasNext();
            }

            @Override
            public Streamable<T> next() {
                final List<T> list = new ArrayList<>();
                while (System.nanoTime() - start < toRun && it.hasNext()) {
                    list.add(it.next());
                }
                if (list.size() == 0 && it.hasNext()) //time unit may be too small
                    list.add(it.next());
                start = System.nanoTime();
                return Streamable.fromIterable(list);
            }
        });
    }

    public final static <T> Stream<Seq<T>> groupedByTime(final Stream<T> stream, final long time, final TimeUnit t) {
        return StreamSupport.stream(new GroupedByTimeSpliterator(stream.spliterator(), () -> Seq.fromIterable(new ArrayList<>()),
                                                                 Function.identity(), time, t),
                                    stream.isParallel());
    }

    @Deprecated
    public final static <T> Stream<Seq<T>> batchByTime(final Stream<T> stream, final long time, final TimeUnit t) {
        return groupedByTime(stream, time, t);
    }

    public final static <T, C extends PersistentCollection<? super T>> Stream<C> groupedByTime(final Stream<T> stream, final long time, final TimeUnit t, final Supplier<C> factory) {
        return StreamSupport.stream(new GroupedByTimeSpliterator(stream.spliterator(), factory,
                                                                 Function.identity(), time, t),
                                    stream.isParallel());
    }

    @Deprecated
    public final static <T, C extends PersistentCollection<? super T>> Stream<C> batchByTime(final Stream<T> stream, final long time, final TimeUnit t, final Supplier<C> factory) {
        return groupedByTime(stream, time, t, factory);
    }

    private static final Object UNSET = new Object();
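
    /*
     * Illustrative usage sketch (added for clarity; not part of the original class).
     * groupedByTime collects elements into batches per time window, while windowByTime does
     * the same but exposes each batch as a Streamable.
     */
    private static void timeGroupingExamples() {
        List<Seq<Integer>> batches = Streams.groupedByTime(Stream.of(1, 2, 3, 4), 500, TimeUnit.MILLISECONDS)
                                            .collect(Collectors.toList());
        List<Streamable<Integer>> windows = Streams.windowByTime(Stream.of(1, 2, 3, 4), 500, TimeUnit.MILLISECONDS)
                                                   .collect(Collectors.toList());
        System.out.println(batches + " " + windows);
    }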
    /**
     * Group data in a Stream using knowledge of the current batch and the next entry to determine grouping limits
     *
     * @see Traversable#groupedUntil(BiPredicate)
     *
     * @param stream Stream to group
     * @param predicate Predicate to determine grouping
     * @return Stream grouped into Lists determined by predicate
     */
    public final static <T> Stream<Seq<T>> groupedStatefullyUntil(final Stream<T> stream,
                                                                  final BiPredicate<Seq<? super T>, ? super T> predicate) {
        return StreamSupport.stream(new GroupedStatefullySpliterator<>(stream.spliterator(), () -> Seq.of(), Function.identity(), predicate.negate()),
                                    stream.isParallel());
    }

    /**
     * Group a Stream while the supplied predicate holds
     *
     * @see ReactiveSeq#groupedWhile(Predicate)
     *
     * @param stream Stream to group
     * @param predicate Predicate to determine grouping
     * @return Stream grouped into Lists determined by predicate
     */
    public final static <T> Stream<Seq<T>> groupedWhile(final Stream<T> stream, final Predicate<? super T> predicate) {
        return StreamSupport.stream(new GroupedWhileSpliterator<>(stream.spliterator(), () -> Seq.of(), Function.identity(), predicate.negate()),
                                    stream.isParallel());
    }

    @Deprecated
    public final static <T> Stream<Seq<T>> batchWhile(final Stream<T> stream, final Predicate<? super T> predicate) {
        return groupedWhile(stream, predicate);
    }

    /**
     * Group a Stream while the supplied predicate holds
     *
     * @see ReactiveSeq#groupedWhile(Predicate, Supplier)
     *
     * @param stream Stream to group
     * @param predicate Predicate to determine grouping
     * @param factory Supplier to create the collection for each grouping
     * @return Stream grouped into Collections determined by predicate
     */
    public final static <T, C extends PersistentCollection<? super T>> Stream<C> groupedWhile(final Stream<T> stream, final Predicate<? super T> predicate, final Supplier<C> factory) {
        return StreamSupport.stream(new GroupedWhileSpliterator<>(stream.spliterator(), factory, Function.identity(), predicate.negate()),
                                    stream.isParallel());
    }

    @Deprecated
    public final static <T, C extends PersistentCollection<? super T>> Stream<C> batchWhile(final Stream<T> stream, final Predicate<? super T> predicate, final Supplier<C> factory) {
        return groupedWhile(stream, predicate, factory);
    }

    /**
     * Group a Stream until the supplied predicate holds
     *
     * @see ReactiveSeq#groupedUntil(Predicate)
     *
     * @param stream Stream to group
     * @param predicate Predicate to determine grouping
     * @return Stream grouped into Lists determined by predicate
     */
    public final static <T> Stream<Seq<T>> groupedUntil(final Stream<T> stream, final Predicate<? super T> predicate) {
        return groupedWhile(stream, predicate.negate());
    }

    @Deprecated
    public final static <T> Stream<Seq<T>> batchUntil(final Stream<T> stream, final Predicate<? super T> predicate) {
        return groupedUntil(stream, predicate);
    }

    /**
     * Group a Stream by size and time constraints
     *
     * @see ReactiveSeq#groupedBySizeAndTime(int, long, TimeUnit)
     *
     * @param stream Stream to group
     * @param size Max group size
     * @param time Max group time
     * @param t Time unit for max group time
     * @return Stream grouped by time and size
     */
    public final static <T> Stream<Seq<T>> groupedBySizeAndTime(final Stream<T> stream, final int size, final long time, final TimeUnit t) {
        return StreamSupport.stream(new GroupedByTimeAndSizeSpliterator(stream.spliterator(), () -> Seq.fromIterable(new ArrayList<>(size)),
                                                                        Function.identity(), size, time, t),
                                    stream.isParallel());
    }

    /**
     * Group a Stream by size and time constraints
     *
     * @see ReactiveSeq#groupedBySizeAndTime(int, long, TimeUnit, Supplier)
     *
     * @param stream Stream to group
     * @param size Max group size
     * @param time Max group time
     * @param t Time unit for max group time
     * @param factory Supplier to create the collection for each grouping
     * @return Stream grouped by time and size
     */
    public final static <T, C extends PersistentCollection<? super T>> Stream<C> groupedBySizeAndTime(final Stream<T> stream, final int size, final long time, final TimeUnit t, final Supplier<C> factory) {
        return StreamSupport.stream(new GroupedByTimeAndSizeSpliterator(stream.spliterator(), factory,
                                                                        Function.identity(), size, time, t),
                                    stream.isParallel());
    }
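
    /*
     * Illustrative usage sketch (added for clarity; not part of the original class).
     * groupedWhile keeps adding to a group while the predicate holds, whereas groupedUntil
     * closes a group once the predicate matches.
     */
    private static void groupingExamples() {
        List<Seq<Integer>> whileEven = Streams.groupedWhile(Stream.of(2, 4, 5, 6), i -> i % 2 == 0)
                                              .collect(Collectors.toList());
        List<Seq<Integer>> untilNegative = Streams.groupedUntil(Stream.of(1, 2, -1, 3), i -> i < 0)
                                                  .collect(Collectors.toList());
        System.out.println(whileEven + " " + untilNegative);
    }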
    /**
     * Allow one element through per time period, drop all other elements in
     * that time period
     *
     * @see ReactiveSeq#debounce(long, TimeUnit)
     *
     * @param stream Stream to debounce
     * @param time Time to apply debouncing over
     * @param t Time unit for debounce period
     * @return Stream with debouncing applied
     */
    public final static <T> Stream<T> debounce(final Stream<T> stream, final long time, final TimeUnit t) {
        return new DebounceOperator<>(stream).debounce(time, t);
    }

    /**
     * Emit one element per time period
     *
     * @see ReactiveSeq#onePer(long, TimeUnit)
     *
     * @param stream Stream to emit one element per time period from
     * @param time Time period
     * @param t Time unit
     * @return Stream with slowed emission
     */
    public final static <T> Stream<T> onePer(final Stream<T> stream, final long time, final TimeUnit t) {
        return new OnePerOperator<>(stream).onePer(time, t);
    }

    /**
     * Introduce a random jitter / time delay between the emission of elements
     *
     * @see ReactiveSeq#jitter(long)
     *
     * @param stream Stream to introduce jitter to
     * @param jitterInNanos Max jitter period - a random number less than this is used for each jitter
     * @return Jittered Stream
     */
    public final static <T> Stream<T> jitter(final Stream<T> stream, final long jitterInNanos) {
        final Iterator<T> it = stream.iterator();
        final Random r = new Random();
        return Streams.stream(new Iterator<T>() {

            @Override
            public boolean hasNext() {
                return it.hasNext();
            }

            @Override
            public T next() {
                final T nextValue = it.next();
                try {
                    final long elapsedNanos = (long) (jitterInNanos * r.nextDouble());
                    final long millis = elapsedNanos / 1000000;
                    final int nanos = (int) (elapsedNanos - millis * 1000000);
                    Thread.sleep(Math.max(0, millis), Math.max(0, nanos));
                } catch (final InterruptedException e) {
                    throw ExceptionSoftener.throwSoftenedException(e);
                }
                return nextValue;
            }
        });
    }

    public final static <T> Stream<T> fixedDelay(final Stream<T> stream, final long time, final TimeUnit unit) {
        final Iterator<T> it = stream.iterator();
        return Streams.stream(new Iterator<T>() {

            @Override
            public boolean hasNext() {
                return it.hasNext();
            }

            @Override
            public T next() {
                final T nextValue = it.next();
                try {
                    final long elapsedNanos = unit.toNanos(time);
                    final long millis = elapsedNanos / 1000000;
                    final int nanos = (int) (elapsedNanos - millis * 1000000);
                    Thread.sleep(Math.max(0, millis), Math.max(0, nanos));
                } catch (final InterruptedException e) {
                    throw ExceptionSoftener.throwSoftenedException(e);
                }
                return nextValue;
            }
        });
    }

    public final static <T> Stream<T> xPer(final Stream<T> stream, final int x, final long time, final TimeUnit t) {
        final Iterator<T> it = stream.iterator();
        final long next = t.toNanos(time);
        return Streams.stream(new Iterator<T>() {
            volatile long last = -1;
            volatile int count = 0;

            @Override
            public boolean hasNext() {
                return it.hasNext();
            }

            @Override
            public T next() {
                final T nextValue = it.next();
                if (++count < x)
                    return nextValue;
                count = 0;
                LockSupport.parkNanos(next - (System.nanoTime() - last));
                last = System.nanoTime();
                return nextValue;
            }
        });
    }

    public final static <T> Connectable<T> hotStream(final Stream<T> stream, final Executor exec) {
        return new NonPausableConnectable<>(stream).init(exec);
    }

    public final static <T> Connectable<T> primedHotStream(final Stream<T> stream, final Executor exec) {
        return new NonPausableConnectable<>(stream).paused(exec);
    }

    public final static <T> PausableConnectable<T> pausableHotStream(final Stream<T> stream, final Executor exec) {
        return new PausableConnectableImpl<>(stream).init(exec);
    }

    public final static <T> PausableConnectable<T> primedPausableHotStream(final Stream<T> stream, final Executor exec) {
        return new PausableConnectableImpl<>(stream).paused(exec);
    }

    public static <T, R> Stream<R> tailRec(T initial, Function<? super T, ? extends Stream<? extends Either<T, R>>> fn) {
        return ReactiveSeq.tailRec(initial, fn.andThen(ReactiveSeq::fromStream))
                          .stream();
    }

}
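
// Illustrative usage sketch (added for clarity; not part of the original source file).
// The time-based operators above throttle an existing Stream, and tailRec expresses an
// iterative computation as a Stream. A minimal sketch, assuming the signatures above and
// cyclops' Either.left / Either.right factories:
//
//   Stream<Integer> slowed  = Streams.onePer(Stream.of(1, 2, 3), 100, TimeUnit.MILLISECONDS);
//   Stream<Integer> limited = Streams.xPer(Stream.of(1, 2, 3, 4), 2, 1, TimeUnit.SECONDS);
//   Stream<Integer> counted = Streams.tailRec(0,
//           i -> i < 5 ? Stream.of(Either.left(i + 1), Either.right(i)) : Stream.of(Either.right(i)));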