/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* license agreements; and to You under the Apache License, version 2.0:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* This file is part of the Apache Pekko project, which was derived from Akka.
*/
/*
* Copyright (C) 2014-2022 Lightbend Inc.
*/
package org.apache.pekko.stream.javadsl
import java.util.Optional
import java.util.concurrent.CompletionStage
import scala.annotation.unchecked.uncheckedVariance
import org.apache.pekko
import pekko.actor.ClassicActorSystemProvider
import pekko.annotation.ApiMayChange
import pekko.event.{ LogMarker, LoggingAdapter, MarkerLoggingAdapter }
import pekko.japi.Pair
import pekko.japi.function
import pekko.stream._
import pekko.util.ConstantFun
import pekko.util.FutureConverters._
import pekko.util.JavaDurationConverters._
import pekko.util.OptionConverters._
import pekko.util.ccompat.JavaConverters._
object SourceWithContext {
/**
* Creates a SourceWithContext from a regular source that operates on `Pair` elements.
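*
* A minimal usage sketch (hypothetical element values; `NotUsed` materialization assumed):
*
* ```
* val pairs: Source[Pair[String, Long], NotUsed] =
*   Source.from(java.util.Arrays.asList(Pair.create("a", 1L), Pair.create("b", 2L)))
* val withContext: SourceWithContext[String, Long, NotUsed] =
*   SourceWithContext.fromPairs(pairs)
* ```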
*/
def fromPairs[Out, CtxOut, Mat](under: Source[Pair[Out, CtxOut], Mat]): SourceWithContext[Out, CtxOut, Mat] = {
new SourceWithContext(scaladsl.SourceWithContext.fromTuples(under.asScala.map(_.toScala)))
}
/**
* Creates a SourceWithContext from an existing base SourceWithContext outputting an optional element
* and applying an additional viaFlow only if the element in the stream is defined.
*
* '''Emits when''' the provided viaFlow runs with defined elements
*
* '''Backpressures when''' the viaFlow runs for the defined elements and downstream backpressures
*
* '''Completes when''' upstream completes
*
* '''Cancels when''' downstream cancels
*
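* A minimal sketch (hypothetical element values; `toUpper` transforms only the defined data
* elements and preserves ordering and cardinality):
*
* ```
* val source: SourceWithContext[Optional[String], Long, NotUsed] =
*   SourceWithContext.fromPairs(Source.from(java.util.Arrays.asList(
*     Pair.create(Optional.of("a"), 1L), Pair.create(Optional.empty[String](), 2L))))
* val toUpper: Flow[String, String, NotUsed] = Flow.of(classOf[String]).map(_.toUpperCase)
* // emits (Optional.of("A"), 1L) and (Optional.empty(), 2L); contexts are untouched
* val result: SourceWithContext[Optional[String], Long, NotUsed] =
*   SourceWithContext.unsafeOptionalDataVia(source, toUpper, Keep.none[NotUsed, NotUsed])
* ```
*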
* @param source The base source that outputs an optional element
* @param viaFlow The flow that is applied if the optional element is defined. This flow only works
* on the data portion of the stream and ignores the context, so it *must* not re-order,
* drop or emit multiple elements for one incoming element
* @param combine How to combine the materialized values of source and viaFlow
* @return a SourceWithContext with the viaFlow applied onto defined elements of the flow. The output value
* is contained within an Optional which indicates whether the original source's element had viaFlow
* applied.
* @since 1.1.0
*/
@ApiMayChange
def unsafeOptionalDataVia[SOut, FOut, Ctx, SMat, FMat, Mat](source: SourceWithContext[Optional[SOut], Ctx, SMat],
viaFlow: Flow[SOut, FOut, FMat],
combine: function.Function2[SMat, FMat, Mat]
): SourceWithContext[Optional[FOut], Ctx, Mat] =
scaladsl.SourceWithContext.unsafeOptionalDataVia(source.map(_.toScala).asScala, viaFlow.asScala)(
combinerToScala(combine)).map(
_.toJava).asJava
}
/**
* A source that provides operations which automatically propagate the context of an element.
* Only a subset of common operations from [[Source]] is supported. As an escape hatch you can
* use [[SourceWithContext#via]] to manually provide the context propagation for otherwise unsupported
* operations.
*
* Can be created by calling [[Source.asSourceWithContext]]
*/
final class SourceWithContext[+Out, +Ctx, +Mat](delegate: scaladsl.SourceWithContext[Out, Ctx, Mat])
extends GraphDelegate(delegate) {
/**
* Transform this source by the given regular flow. The given flow must support manual context propagation by
* taking and producing tuples of (data, context).
*
* It is up to the implementer to ensure the inner flow does not exhibit any behavior that is not expected
* by the downstream elements, such as reordering. For more background on these requirements
* see https://pekko.apache.org/docs/pekko/current/stream/stream-context.html.
*
* This can be used as an escape hatch for operations that are not (yet) provided with automatic
* context propagation here.
*
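* A minimal sketch (hypothetical `stringsWithContext: SourceWithContext[String, Long, NotUsed]`);
* the pair-level flow keeps data and context together, so ordering and cardinality stay consistent:
*
* ```
* val pairFlow: Flow[Pair[String, Long], Pair[Int, Long], NotUsed] =
*   Flow.create[Pair[String, Long]]().map(p => Pair.create(p.first.length, p.second))
* val lengths: SourceWithContext[Int, Long, NotUsed] = stringsWithContext.via(pairFlow)
* ```
*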
* @see [[pekko.stream.javadsl.Flow.via]]
*/
def via[Out2, Ctx2, Mat2](
viaFlow: Graph[FlowShape[Pair[Out @uncheckedVariance, Ctx @uncheckedVariance], Pair[Out2, Ctx2]], Mat2])
: SourceWithContext[Out2, Ctx2, Mat] =
viaScala(_.via(pekko.stream.scaladsl.Flow[(Out, Ctx)].map { case (o, c) => Pair(o, c) }.via(viaFlow).map(
_.toScala)))
/**
* Transform this source by the given regular flow. The given flow works on the data portion of the stream and
* ignores the context.
*
* The given flow *must* not re-order, drop or emit multiple elements for one incoming
* element; the sequence of incoming contexts is re-combined with the outgoing
* elements of the stream. If a flow that does not fulfill this requirement is used, the stream
* will not fail but will continue running in a corrupt state, re-combining incorrect pairs
* of elements and contexts or deadlocking.
*
* For more background on these requirements
* see https://pekko.apache.org/docs/pekko/current/stream/stream-context.html.
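*
* A minimal sketch (hypothetical `stringsWithContext: SourceWithContext[String, Long, NotUsed]`);
* the data flow is strictly one-to-one and order-preserving, so it is safe to attach:
*
* ```
* val parse: Flow[String, Int, NotUsed] = Flow.of(classOf[String]).map(_.trim.length)
* val parsed: SourceWithContext[Int, Long, NotUsed] = stringsWithContext.unsafeDataVia(parse)
* ```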
*/
@ApiMayChange def unsafeDataVia[Out2, Mat2](
viaFlow: Graph[FlowShape[Out @uncheckedVariance, Out2], Mat2]): SourceWithContext[Out2, Ctx, Mat] =
viaScala(_.unsafeDataVia(viaFlow))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.withAttributes]].
*
* @see [[pekko.stream.javadsl.Source.withAttributes]]
*/
override def withAttributes(attr: Attributes): SourceWithContext[Out, Ctx, Mat] =
viaScala(_.withAttributes(attr))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.mapError]].
*
* @see [[pekko.stream.javadsl.Source.mapError]]
*/
def mapError(pf: PartialFunction[Throwable, Throwable]): SourceWithContext[Out, Ctx, Mat] =
viaScala(_.mapError(pf))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.mapMaterializedValue]].
*
* @see [[pekko.stream.javadsl.Source.mapMaterializedValue]]
*/
def mapMaterializedValue[Mat2](f: function.Function[Mat, Mat2]): SourceWithContext[Out, Ctx, Mat2] =
viaScala(_.mapMaterializedValue(f.apply _))
/**
* Stops automatic context propagation from here and converts this to a regular
* stream of a pair of (data, context).
*/
def asSource(): Source[Pair[Out @uncheckedVariance, Ctx @uncheckedVariance], Mat @uncheckedVariance] =
delegate.asSource.map { case (o, c) => Pair(o, c) }.asJava
// remaining operations in alphabetic order
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.collect]].
*
* Note that the context of elements that are filtered out is skipped as well.
*
* @see [[pekko.stream.javadsl.Source.collect]]
*/
def collect[Out2](pf: PartialFunction[Out, Out2]): SourceWithContext[Out2, Ctx, Mat] =
viaScala(_.collect(pf))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.filter]].
*
* Note that the context of elements that are filtered out is skipped as well.
*
* @see [[pekko.stream.javadsl.Source.filter]]
*/
def filter(p: function.Predicate[Out]): SourceWithContext[Out, Ctx, Mat] =
viaScala(_.filter(p.test))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.filterNot]].
*
* Note that the context of elements that are filtered out is skipped as well.
*
* @see [[pekko.stream.javadsl.Source.filterNot]]
*/
def filterNot(p: function.Predicate[Out]): SourceWithContext[Out, Ctx, Mat] =
viaScala(_.filterNot(p.test))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.grouped]].
*
* Each output group will be associated with a `List` of corresponding context elements.
*
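* A minimal sketch (hypothetical `elementsWithContext` of (data, context) pairs):
*
* ```
* Input:
*
* ("a", 1)
* ("b", 2)
* ("c", 3)
*
* elementsWithContext.grouped(2)
*
* Output:
*
* (["a", "b"], [1, 2])
* (["c"], [3])
* ```
*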
* @see [[pekko.stream.javadsl.Source.grouped]]
*/
def grouped(
n: Int): SourceWithContext[java.util.List[Out @uncheckedVariance], java.util.List[Ctx @uncheckedVariance], Mat] =
viaScala(_.grouped(n).map(_.asJava).mapContext(_.asJava))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.map]].
*
* @see [[pekko.stream.javadsl.Source.map]]
*/
def map[Out2](f: function.Function[Out, Out2]): SourceWithContext[Out2, Ctx, Mat] =
viaScala(_.map(f.apply))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.mapAsync]].
*
* @see [[pekko.stream.javadsl.Source.mapAsync]]
*/
def mapAsync[Out2](
parallelism: Int,
f: function.Function[Out, CompletionStage[Out2]]): SourceWithContext[Out2, Ctx, Mat] =
viaScala(_.mapAsync[Out2](parallelism)(o => f.apply(o).asScala))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.mapAsyncPartitioned]].
*
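* A minimal sketch (hypothetical `requestsWithContext: SourceWithContext[String, Long, NotUsed]`);
* elements are partitioned by a key derived from the data while each context stays attached:
*
* ```
* val enriched: SourceWithContext[String, Long, NotUsed] =
*   requestsWithContext.mapAsyncPartitioned(4,
*     (req: String) => req.substring(0, 1),
*     (req: String, key: String) => CompletableFuture.completedFuture(key + ":" + req))
* ```
*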
* @since 1.1.0
* @see [[pekko.stream.javadsl.Source.mapAsyncPartitioned]]
*/
def mapAsyncPartitioned[Out2, P](
parallelism: Int,
partitioner: function.Function[Out, P],
f: function.Function2[Out, P, CompletionStage[Out2]]): SourceWithContext[Out2, Ctx, Mat] = {
viaScala(_.mapAsyncPartitioned(parallelism)(partitioner(_))(f(_, _).asScala))
}
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.mapAsyncPartitionedUnordered]].
*
* @since 1.1.0
* @see [[pekko.stream.javadsl.Source.mapAsyncPartitionedUnordered]]
*/
def mapAsyncPartitionedUnordered[Out2, P](
parallelism: Int,
partitioner: function.Function[Out, P],
f: function.Function2[Out, P, CompletionStage[Out2]]): SourceWithContext[Out2, Ctx, Mat] =
viaScala(_.mapAsyncPartitionedUnordered(parallelism)(partitioner(_))(f(_, _).asScala))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.mapConcat]].
*
* The context of the input element will be associated with each of the output elements calculated from
* this input element.
*
* Example:
*
* ```
* def dup(element: String) = Seq(element, element)
*
* Input:
*
* ("a", 1)
* ("b", 2)
*
* inputElements.mapConcat(dup)
*
* Output:
*
* ("a", 1)
* ("a", 1)
* ("b", 2)
* ("b", 2)
* ```
*
* @see [[pekko.stream.javadsl.Source.mapConcat]]
*/
def mapConcat[Out2](f: function.Function[Out, _ <: java.lang.Iterable[Out2]]): SourceWithContext[Out2, Ctx, Mat] =
viaScala(_.mapConcat(elem => f.apply(elem).asScala))
/**
* Apply the given function to each context element (leaving the data elements unchanged).
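*
* A minimal sketch (hypothetical `recordsWithContext` with `Long` offsets as contexts):
*
* ```
* val renumbered: SourceWithContext[String, Long, NotUsed] =
*   recordsWithContext.mapContext((offset: Long) => offset + 1L)
* ```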
*/
def mapContext[Ctx2](extractContext: function.Function[Ctx, Ctx2]): SourceWithContext[Out, Ctx2, Mat] =
viaScala(_.mapContext(extractContext.apply))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.sliding]].
*
* Each output group will be associated with a `List` of corresponding context elements.
*
* @see [[pekko.stream.javadsl.Source.sliding]]
*/
def sliding(n: Int, step: Int = 1)
: SourceWithContext[java.util.List[Out @uncheckedVariance], java.util.List[Ctx @uncheckedVariance], Mat] =
viaScala(_.sliding(n, step).map(_.asJava).mapContext(_.asJava))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.log]].
*
* @see [[pekko.stream.javadsl.Source.log]]
*/
def log(name: String, extract: function.Function[Out, Any], log: LoggingAdapter): SourceWithContext[Out, Ctx, Mat] =
viaScala(_.log(name, e => extract.apply(e))(log))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.log]].
*
* @see [[pekko.stream.javadsl.Source.log]]
*/
def log(name: String, extract: function.Function[Out, Any]): SourceWithContext[Out, Ctx, Mat] =
this.log(name, extract, null)
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.log]].
*
* @see [[pekko.stream.javadsl.Source.log]]
*/
def log(name: String, log: LoggingAdapter): SourceWithContext[Out, Ctx, Mat] =
this.log(name, ConstantFun.javaIdentityFunction[Out], log)
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.log]].
*
* @see [[pekko.stream.javadsl.Source.log]]
*/
def log(name: String): SourceWithContext[Out, Ctx, Mat] =
this.log(name, ConstantFun.javaIdentityFunction[Out], null)
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.logWithMarker]].
*
* @see [[pekko.stream.javadsl.Source.logWithMarker]]
*/
def logWithMarker(
name: String,
marker: function.Function2[Out, Ctx, LogMarker],
extract: function.Function[Out, Any],
log: MarkerLoggingAdapter): SourceWithContext[Out, Ctx, Mat] =
viaScala(_.logWithMarker(name, (e, c) => marker.apply(e, c), e => extract.apply(e))(log))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.logWithMarker]].
*
* @see [[pekko.stream.javadsl.Source.logWithMarker]]
*/
def logWithMarker(
name: String,
marker: function.Function2[Out, Ctx, LogMarker],
extract: function.Function[Out, Any]): SourceWithContext[Out, Ctx, Mat] =
this.logWithMarker(name, marker, extract, null)
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.logWithMarker]].
*
* @see [[pekko.stream.javadsl.Source.logWithMarker]]
*/
def logWithMarker(
name: String,
marker: function.Function2[Out, Ctx, LogMarker],
log: MarkerLoggingAdapter): SourceWithContext[Out, Ctx, Mat] =
this.logWithMarker(name, marker, ConstantFun.javaIdentityFunction[Out], log)
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.logWithMarker]].
*
* @see [[pekko.stream.javadsl.Source.logWithMarker]]
*/
def logWithMarker(name: String, marker: function.Function2[Out, Ctx, LogMarker]): SourceWithContext[Out, Ctx, Mat] =
this.logWithMarker(name, marker, ConstantFun.javaIdentityFunction[Out], null)
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.throttle]].
*
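* A minimal sketch (hypothetical `elementsWithContext`): limit the stream to at most
* 10 (data, context) elements per second:
*
* ```
* val throttled: SourceWithContext[String, Long, NotUsed] =
*   elementsWithContext.throttle(10, java.time.Duration.ofSeconds(1))
* ```
*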
* @see [[pekko.stream.javadsl.Source.throttle]]
*/
def throttle(elements: Int, per: java.time.Duration): SourceWithContext[Out, Ctx, Mat] =
viaScala(_.throttle(elements, per.asScala))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.throttle]].
*
* @see [[pekko.stream.javadsl.Source.throttle]]
*/
def throttle(
elements: Int,
per: java.time.Duration,
maximumBurst: Int,
mode: ThrottleMode): SourceWithContext[Out, Ctx, Mat] =
viaScala(_.throttle(elements, per.asScala, maximumBurst, mode))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.throttle]].
*
* @see [[pekko.stream.javadsl.Source.throttle]]
*/
def throttle(
cost: Int,
per: java.time.Duration,
costCalculation: function.Function[Out, Integer]): SourceWithContext[Out, Ctx, Mat] =
viaScala(_.throttle(cost, per.asScala, costCalculation.apply))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.throttle]].
*
* @see [[pekko.stream.javadsl.Source.throttle]]
*/
def throttle(
cost: Int,
per: java.time.Duration,
maximumBurst: Int,
costCalculation: function.Function[Out, Integer],
mode: ThrottleMode): SourceWithContext[Out, Ctx, Mat] =
viaScala(_.throttle(cost, per.asScala, maximumBurst, costCalculation.apply, mode))
/**
* Connect this [[pekko.stream.javadsl.SourceWithContext]] to a [[pekko.stream.javadsl.Sink]],
* concatenating the processing steps of both.
*/
def to[Mat2](
sink: Graph[SinkShape[Pair[Out @uncheckedVariance, Ctx @uncheckedVariance]], Mat2]): javadsl.RunnableGraph[Mat] =
RunnableGraph.fromGraph(asScala.asSource.map { case (o, e) => Pair(o, e) }.to(sink))
/**
* Connect this [[pekko.stream.javadsl.SourceWithContext]] to a [[pekko.stream.javadsl.Sink]],
* concatenating the processing steps of both.
*/
def toMat[Mat2, Mat3](
sink: Graph[SinkShape[Pair[Out @uncheckedVariance, Ctx @uncheckedVariance]], Mat2],
combine: function.Function2[Mat, Mat2, Mat3]): javadsl.RunnableGraph[Mat3] =
RunnableGraph.fromGraph(asScala.asSource.map { case (o, e) => Pair(o, e) }.toMat(sink)(combinerToScala(combine)))
/**
* Connect this [[pekko.stream.javadsl.SourceWithContext]] to a [[pekko.stream.javadsl.Sink]] and run it.
* The returned value is the materialized value of the `Sink`.
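*
* A minimal sketch (assumes an `ActorSystem` named `system` is in scope; the sink receives
* (data, context) pairs):
*
* ```
* val done: CompletionStage[Done] =
*   elementsWithContext.runWith(
*     Sink.foreach((pair: Pair[String, Long]) => System.out.println(pair)),
*     system)
* ```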
*/
def runWith[M](
sink: Graph[SinkShape[Pair[Out @uncheckedVariance, Ctx @uncheckedVariance]], M],
systemProvider: ClassicActorSystemProvider): M =
toMat(sink, Keep.right[Mat, M]).run(systemProvider.classicSystem)
/**
* Connect this [[pekko.stream.javadsl.SourceWithContext]] to a [[pekko.stream.javadsl.Sink]] and run it.
* The returned value is the materialized value of the `Sink`.
*
* Prefer the method taking an ActorSystem unless you have special requirements.
*/
def runWith[M](
sink: Graph[SinkShape[Pair[Out @uncheckedVariance, Ctx @uncheckedVariance]], M],
materializer: Materializer): M =
toMat(sink, Keep.right[Mat, M]).run(materializer)
def asScala: scaladsl.SourceWithContext[Out, Ctx, Mat] = delegate
private[this] def viaScala[Out2, Ctx2, Mat2](
f: scaladsl.SourceWithContext[Out, Ctx, Mat] => scaladsl.SourceWithContext[Out2, Ctx2, Mat2])
: SourceWithContext[Out2, Ctx2, Mat2] =
new SourceWithContext(f(delegate))
}