/*
* Copyright (c) 2013, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.util.stream;
import java.util.DoubleSummaryStatistics;
import java.util.Objects;
import java.util.OptionalDouble;
import java.util.PrimitiveIterator;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.function.BiConsumer;
import java.util.function.BinaryOperator;
import java.util.function.DoubleBinaryOperator;
import java.util.function.DoubleConsumer;
import java.util.function.DoubleFunction;
import java.util.function.DoublePredicate;
import java.util.function.DoubleToIntFunction;
import java.util.function.DoubleToLongFunction;
import java.util.function.DoubleUnaryOperator;
import java.util.function.IntFunction;
import java.util.function.ObjDoubleConsumer;
import java.util.function.Supplier;
/**
* Abstract base class for an intermediate pipeline stage or pipeline source
* stage of a stream whose elements are of type {@code double}.
*
* @param <E_IN> type of elements in the upstream source
*
* @since 1.8
*/
abstract class DoublePipeline<E_IN>
extends AbstractPipeline<E_IN, Double, DoubleStream>
implements DoubleStream {
/**
* Constructor for the head of a stream pipeline.
*
* @param source {@code Supplier<Spliterator>} describing the stream source
* @param sourceFlags the source flags for the stream source, described in
* {@link StreamOpFlag}
*/
DoublePipeline(Supplier<? extends Spliterator<Double>> source,
int sourceFlags, boolean parallel) {
super(source, sourceFlags, parallel);
}
/**
* Constructor for the head of a stream pipeline.
*
* @param source {@code Spliterator} describing the stream source
* @param sourceFlags the source flags for the stream source, described in
* {@link StreamOpFlag}
*/
DoublePipeline(Spliterator<Double> source,
int sourceFlags, boolean parallel) {
super(source, sourceFlags, parallel);
}
/**
* Constructor for appending an intermediate operation onto an existing
* pipeline.
*
* @param upstream the upstream element source.
* @param opFlags the operation flags
*/
DoublePipeline(AbstractPipeline<?, E_IN, ?> upstream, int opFlags) {
super(upstream, opFlags);
}
/**
* Adapt a {@code Sink<Double>} to a {@code DoubleConsumer}, ideally simply
* by casting.
*/
private static DoubleConsumer adapt(Sink<Double> sink) {
if (sink instanceof DoubleConsumer) {
return (DoubleConsumer) sink;
} else {
if (Tripwire.ENABLED)
Tripwire.trip(AbstractPipeline.class,
"using DoubleStream.adapt(Sink s)");
return sink::accept;
}
}
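// Editorial note (not part of the original source): the cast path is the common
// case, because the double-specialized sink type in this package, Sink.OfDouble,
// also extends DoubleConsumer, so no boxing occurs. The sink::accept fallback is
// functionally equivalent but boxes each value, which is what the Tripwire
// diagnostic above is meant to flag during testing.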
/**
* Adapt a {@code Spliterator<Double>} to a {@code Spliterator.OfDouble}.
*
* @implNote
* The implementation attempts to cast to a Spliterator.OfDouble, and throws
* an exception if this cast is not possible.
*/
private static Spliterator.OfDouble adapt(Spliterator<Double> s) {
if (s instanceof Spliterator.OfDouble) {
return (Spliterator.OfDouble) s;
} else {
if (Tripwire.ENABLED)
Tripwire.trip(AbstractPipeline.class,
"using DoubleStream.adapt(Spliterator s)");
throw new UnsupportedOperationException("DoubleStream.adapt(Spliterator s)");
}
}
// Shape-specific methods
@Override
final StreamShape getOutputShape() {
return StreamShape.DOUBLE_VALUE;
}
@Override
final <P_IN> Node<Double> evaluateToNode(PipelineHelper<Double> helper,
Spliterator<P_IN> spliterator,
boolean flattenTree,
IntFunction<Double[]> generator) {
return Nodes.collectDouble(helper, spliterator, flattenTree);
}
@Override
final <P_IN> Spliterator<Double> wrap(PipelineHelper<Double> ph,
Supplier<Spliterator<P_IN>> supplier,
boolean isParallel) {
return new StreamSpliterators.DoubleWrappingSpliterator<>(ph, supplier, isParallel);
}
@Override
@SuppressWarnings("unchecked")
final Spliterator.OfDouble lazySpliterator(Supplier<? extends Spliterator<Double>> supplier) {
return new StreamSpliterators.DelegatingSpliterator.OfDouble((Supplier<Spliterator.OfDouble>) supplier);
}
@Override
final boolean forEachWithCancel(Spliterator<Double> spliterator, Sink<Double> sink) {
Spliterator.OfDouble spl = adapt(spliterator);
DoubleConsumer adaptedSink = adapt(sink);
boolean cancelled;
do { } while (!(cancelled = sink.cancellationRequested()) && spl.tryAdvance(adaptedSink));
return cancelled;
}
@Override
final Node.Builder<Double> makeNodeBuilder(long exactSizeIfKnown, IntFunction<Double[]> generator) {
return Nodes.doubleBuilder(exactSizeIfKnown);
}
private <U> Stream<U> mapToObj(DoubleFunction<? extends U> mapper, int opFlags) {
return new ReferencePipeline.StatelessOp<Double, U>(this, StreamShape.DOUBLE_VALUE, opFlags) {
@Override
Sink<Double> opWrapSink(int flags, Sink<U> sink) {
return new Sink.ChainedDouble<U>(sink) {
@Override
public void accept(double t) {
downstream.accept(mapper.apply(t));
}
};
}
};
}
// DoubleStream
@Override
public final PrimitiveIterator.OfDouble iterator() {
return Spliterators.iterator(spliterator());
}
@Override
public final Spliterator.OfDouble spliterator() {
return adapt(super.spliterator());
}
// Stateless intermediate ops from DoubleStream
@Override
public final Stream<Double> boxed() {
return mapToObj(Double::valueOf, 0);
}
@Override
public final DoubleStream map(DoubleUnaryOperator mapper) {
Objects.requireNonNull(mapper);
return new StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE,
StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) {
@Override
Sink<Double> opWrapSink(int flags, Sink<Double> sink) {
return new Sink.ChainedDouble<Double>(sink) {
@Override
public void accept(double t) {
downstream.accept(mapper.applyAsDouble(t));
}
};
}
};
}
@Override
public final <U> Stream<U> mapToObj(DoubleFunction<? extends U> mapper) {
Objects.requireNonNull(mapper);
return mapToObj(mapper, StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT);
}
@Override
public final IntStream mapToInt(DoubleToIntFunction mapper) {
Objects.requireNonNull(mapper);
return new IntPipeline.StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE,
StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) {
@Override
Sink<Double> opWrapSink(int flags, Sink<Integer> sink) {
return new Sink.ChainedDouble<Integer>(sink) {
@Override
public void accept(double t) {
downstream.accept(mapper.applyAsInt(t));
}
};
}
};
}
@Override
public final LongStream mapToLong(DoubleToLongFunction mapper) {
Objects.requireNonNull(mapper);
return new LongPipeline.StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE,
StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) {
@Override
Sink<Double> opWrapSink(int flags, Sink<Long> sink) {
return new Sink.ChainedDouble<Long>(sink) {
@Override
public void accept(double t) {
downstream.accept(mapper.applyAsLong(t));
}
};
}
};
}
@Override
public final DoubleStream flatMap(DoubleFunction<? extends DoubleStream> mapper) {
Objects.requireNonNull(mapper);
return new StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE,
StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT | StreamOpFlag.NOT_SIZED) {
@Override
Sink<Double> opWrapSink(int flags, Sink<Double> sink) {
return new Sink.ChainedDouble<Double>(sink) {
// true if cancellationRequested() has been called
boolean cancellationRequestedCalled;
// cache the consumer to avoid creation on every accepted element
DoubleConsumer downstreamAsDouble = downstream::accept;
@Override
public void begin(long size) {
downstream.begin(-1);
}
@Override
public void accept(double t) {
try (DoubleStream result = mapper.apply(t)) {
if (result != null) {
if (!cancellationRequestedCalled) {
result.sequential().forEach(downstreamAsDouble);
}
else {
var s = result.sequential().spliterator();
do { } while (!downstream.cancellationRequested() && s.tryAdvance(downstreamAsDouble));
}
}
}
}
@Override
public boolean cancellationRequested() {
// If this method is called then an operation within the stream
// pipeline is short-circuiting (see AbstractPipeline.copyInto).
// Note that we cannot differentiate between an upstream or
// downstream operation
cancellationRequestedCalled = true;
return downstream.cancellationRequested();
}
};
}
};
}
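/*
* Illustrative usage (editorial addition, assuming the public DoubleStream
* factory methods): the cancellation bookkeeping above only matters when a
* downstream operation short-circuits, e.g.
*
*   OptionalDouble first = DoubleStream.of(1.0, 2.0, 3.0)
*       .flatMap(d -> DoubleStream.of(d, -d))
*       .filter(d -> d < 0.0)
*       .findFirst();   // -1.0
*
* Once findFirst() has its result, cancellationRequested() starts returning
* true, and any remaining inner streams are advanced one element at a time
* via tryAdvance (or skipped entirely) rather than drained with forEach.
*/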
@Override
public DoubleStream unordered() {
if (!isOrdered())
return this;
return new StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE, StreamOpFlag.NOT_ORDERED) {
@Override
Sink<Double> opWrapSink(int flags, Sink<Double> sink) {
return sink;
}
};
}
@Override
public final DoubleStream filter(DoublePredicate predicate) {
Objects.requireNonNull(predicate);
return new StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE,
StreamOpFlag.NOT_SIZED) {
@Override
Sink<Double> opWrapSink(int flags, Sink<Double> sink) {
return new Sink.ChainedDouble<Double>(sink) {
@Override
public void begin(long size) {
downstream.begin(-1);
}
@Override
public void accept(double t) {
if (predicate.test(t))
downstream.accept(t);
}
};
}
};
}
@Override
public final DoubleStream peek(DoubleConsumer action) {
Objects.requireNonNull(action);
return new StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE,
0) {
@Override
Sink<Double> opWrapSink(int flags, Sink<Double> sink) {
return new Sink.ChainedDouble<Double>(sink) {
@Override
public void accept(double t) {
action.accept(t);
downstream.accept(t);
}
};
}
};
}
// Stateful intermediate ops from DoubleStream
@Override
public final DoubleStream limit(long maxSize) {
if (maxSize < 0)
throw new IllegalArgumentException(Long.toString(maxSize));
return SliceOps.makeDouble(this, (long) 0, maxSize);
}
@Override
public final DoubleStream skip(long n) {
if (n < 0)
throw new IllegalArgumentException(Long.toString(n));
if (n == 0)
return this;
else {
long limit = -1;
return SliceOps.makeDouble(this, n, limit);
}
}
@Override
public final DoubleStream takeWhile(DoublePredicate predicate) {
return WhileOps.makeTakeWhileDouble(this, predicate);
}
@Override
public final DoubleStream dropWhile(DoublePredicate predicate) {
return WhileOps.makeDropWhileDouble(this, predicate);
}
@Override
public final DoubleStream sorted() {
return SortedOps.makeDouble(this);
}
@Override
public final DoubleStream distinct() {
// While functional and quick to implement, this approach is not very efficient.
// An efficient version requires a double-specific map/set implementation.
return boxed().distinct().mapToDouble(i -> (double) i);
}
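/*
* Editorial note: the boxing round trip above makes this behaviorally the same
* as writing, at the call site (illustrative only):
*
*   double[] unique = DoubleStream.of(1.0, 2.0, 1.0)
*       .boxed()
*       .distinct()
*       .mapToDouble(Double::doubleValue)
*       .toArray();   // {1.0, 2.0}
*/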
// Terminal ops from DoubleStream
@Override
public void forEach(DoubleConsumer consumer) {
evaluate(ForEachOps.makeDouble(consumer, false));
}
@Override
public void forEachOrdered(DoubleConsumer consumer) {
evaluate(ForEachOps.makeDouble(consumer, true));
}
@Override
public final double sum() {
/*
* In the arrays allocated for the collect operation, index 0
* holds the high-order bits of the running sum, index 1 holds
* the low-order bits of the sum computed via compensated
* summation, and index 2 holds the simple sum used to compute
* the proper result if the stream contains infinite values of
* the same sign.
*/
double[] summation = collect(() -> new double[3],
(ll, d) -> {
Collectors.sumWithCompensation(ll, d);
ll[2] += d;
},
(ll, rr) -> {
Collectors.sumWithCompensation(ll, rr[0]);
Collectors.sumWithCompensation(ll, rr[1]);
ll[2] += rr[2];
});
return Collectors.computeFinalSum(summation);
}
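/*
* Illustrative sketch (editorial addition, not part of the original source) of
* why the compensated (Kahan) summation above matters: summing {1e16, 1.0, 1.0}
* naively from left to right loses both 1.0 addends, because 1e16 + 1.0 rounds
* back to 1e16. Essentially the same update rule as Collectors.sumWithCompensation,
* written out inline, recovers them:
*
*   double sum = 0.0, compensation = 0.0;
*   for (double d : new double[] {1e16, 1.0, 1.0}) {
*       double corrected = d - compensation;        // fold in the error from the previous step
*       double next = sum + corrected;              // high-order accumulation (may round)
*       compensation = (next - sum) - corrected;    // error introduced by the addition above
*       sum = next;
*   }
*   // sum == 1.0000000000000002E16 (the exact result); naive summation yields 1.0E16
*/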
@Override
public final OptionalDouble min() {
return reduce(Math::min);
}
@Override
public final OptionalDouble max() {
return reduce(Math::max);
}
/**
* {@inheritDoc}
*
* @implNote The {@code double} format can represent all
* consecutive integers in the range -2<sup>53</sup> to
* 2<sup>53</sup>. If the pipeline has more than 2<sup>53</sup>
* values, the divisor in the average computation will saturate at
* 2<sup>53</sup>, leading to additional numerical errors.
*/
@Override
public final OptionalDouble average() {
/*
* In the arrays allocated for the collect operation, index 0
* holds the high-order bits of the running sum, index 1 holds
* the low-order bits of the sum computed via compensated
* summation, index 2 holds the number of values seen, index 3
* holds the simple sum.
*/
double[] avg = collect(() -> new double[4],
(ll, d) -> {
ll[2]++;
Collectors.sumWithCompensation(ll, d);
ll[3] += d;
},
(ll, rr) -> {
Collectors.sumWithCompensation(ll, rr[0]);
Collectors.sumWithCompensation(ll, rr[1]);
ll[2] += rr[2];
ll[3] += rr[3];
});
return avg[2] > 0
? OptionalDouble.of(Collectors.computeFinalSum(avg) / avg[2])
: OptionalDouble.empty();
}
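/*
* Editorial note: the element count is itself accumulated in a double (avg[2]),
* which is why the @implNote above mentions saturation at 2^53; once the count
* reaches that value, further increments are lost to rounding. Illustrative:
*
*   double count = 0x1p53;                       // 2^53
*   boolean saturated = (count + 1.0 == count);  // true
*/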
@Override
public final long count() {
return evaluate(ReduceOps.makeDoubleCounting());
}
@Override
public final DoubleSummaryStatistics summaryStatistics() {
return collect(DoubleSummaryStatistics::new, DoubleSummaryStatistics::accept,
DoubleSummaryStatistics::combine);
}
@Override
public final double reduce(double identity, DoubleBinaryOperator op) {
return evaluate(ReduceOps.makeDouble(identity, op));
}
@Override
public final OptionalDouble reduce(DoubleBinaryOperator op) {
return evaluate(ReduceOps.makeDouble(op));
}
@Override
public final <R> R collect(Supplier<R> supplier,
ObjDoubleConsumer<R> accumulator,
BiConsumer<R, R> combiner) {
Objects.requireNonNull(combiner);
BinaryOperator<R> operator = (left, right) -> {
combiner.accept(left, right);
return left;
};
return evaluate(ReduceOps.makeDouble(supplier, accumulator, operator));
}
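/*
* Illustrative usage (editorial addition, assuming the public DoubleStream
* factory methods): the BiConsumer combiner is adapted to a BinaryOperator so
* the general reduce machinery can be reused, e.g.
*
*   ArrayList<Double> boxed = DoubleStream.of(1.0, 2.5, 4.0)
*       .collect(ArrayList::new, ArrayList::add, ArrayList::addAll);
*/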
@Override
public final boolean anyMatch(DoublePredicate predicate) {
return evaluate(MatchOps.makeDouble(predicate, MatchOps.MatchKind.ANY));
}
@Override
public final boolean allMatch(DoublePredicate predicate) {
return evaluate(MatchOps.makeDouble(predicate, MatchOps.MatchKind.ALL));
}
@Override
public final boolean noneMatch(DoublePredicate predicate) {
return evaluate(MatchOps.makeDouble(predicate, MatchOps.MatchKind.NONE));
}
@Override
public final OptionalDouble findFirst() {
return evaluate(FindOps.makeDouble(true));
}
@Override
public final OptionalDouble findAny() {
return evaluate(FindOps.makeDouble(false));
}
@Override
public final double[] toArray() {
return Nodes.flattenDouble((Node.OfDouble) evaluateToArrayNode(Double[]::new))
.asPrimitiveArray();
}
//
/**
* Source stage of a DoubleStream.
*
* @param <E_IN> type of elements in the upstream source
*/
static class Head<E_IN> extends DoublePipeline<E_IN> {
/**
* Constructor for the source stage of a DoubleStream.
*
* @param source {@code Supplier<Spliterator>} describing the stream
* source
* @param sourceFlags the source flags for the stream source, described
* in {@link StreamOpFlag}
* @param parallel {@code true} if the pipeline is parallel
*/
Head(Supplier<? extends Spliterator<Double>> source,
int sourceFlags, boolean parallel) {
super(source, sourceFlags, parallel);
}
/**
* Constructor for the source stage of a DoubleStream.
*
* @param source {@code Spliterator} describing the stream source
* @param sourceFlags the source flags for the stream source, described
* in {@link StreamOpFlag}
* @param parallel {@code true} if the pipeline is parallel
*/
Head(Spliterator<Double> source,
int sourceFlags, boolean parallel) {
super(source, sourceFlags, parallel);
}
@Override
final boolean opIsStateful() {
throw new UnsupportedOperationException();
}
@Override
final Sink<E_IN> opWrapSink(int flags, Sink<Double> sink) {
throw new UnsupportedOperationException();
}
// Optimized sequential terminal operations for the head of the pipeline
@Override
public void forEach(DoubleConsumer consumer) {
if (!isParallel()) {
adapt(sourceStageSpliterator()).forEachRemaining(consumer);
}
else {
super.forEach(consumer);
}
}
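// Editorial note: this fast path applies only when forEach is invoked directly on
// the source stage of a sequential pipeline, e.g. (illustratively)
// DoubleStream.of(1.0, 2.0).forEach(action); with no intermediate operations there
// is nothing to wrap, so the source spliterator can be consumed directly instead
// of going through the general evaluate(ForEachOps.makeDouble(...)) path.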
@Override
public void forEachOrdered(DoubleConsumer consumer) {
if (!isParallel()) {
adapt(sourceStageSpliterator()).forEachRemaining(consumer);
}
else {
super.forEachOrdered(consumer);
}
}
}
/**
* Base class for a stateless intermediate stage of a DoubleStream.
*
* @param <E_IN> type of elements in the upstream source
* @since 1.8
*/
abstract static class StatelessOp<E_IN> extends DoublePipeline<E_IN> {
/**
* Construct a new DoubleStream by appending a stateless intermediate
* operation to an existing stream.
*
* @param upstream the upstream pipeline stage
* @param inputShape the stream shape for the upstream pipeline stage
* @param opFlags operation flags for the new stage
*/
StatelessOp(AbstractPipeline<?, E_IN, ?> upstream,
StreamShape inputShape,
int opFlags) {
super(upstream, opFlags);
assert upstream.getOutputShape() == inputShape;
}
@Override
final boolean opIsStateful() {
return false;
}
}
/**
* Base class for a stateful intermediate stage of a DoubleStream.
*
* @param <E_IN> type of elements in the upstream source
* @since 1.8
*/
abstract static class StatefulOp<E_IN> extends DoublePipeline<E_IN> {
/**
* Construct a new DoubleStream by appending a stateful intermediate
* operation to an existing stream.
*
* @param upstream the upstream pipeline stage
* @param inputShape the stream shape for the upstream pipeline stage
* @param opFlags operation flags for the new stage
*/
StatefulOp(AbstractPipeline<?, E_IN, ?> upstream,
StreamShape inputShape,
int opFlags) {
super(upstream, opFlags);
assert upstream.getOutputShape() == inputShape;
}
@Override
final boolean opIsStateful() {
return true;
}
@Override
abstract <P_IN> Node<Double> opEvaluateParallel(PipelineHelper<Double> helper,
Spliterator<P_IN> spliterator,
IntFunction<Double[]> generator);
}
}