// Source listing: org.springframework.data.redis.stream.StreamReceiver (extracted from a Maven / Gradle / Ivy artifact page)
/*
* Copyright 2018-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.redis.stream;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import java.nio.ByteBuffer;
import java.time.Duration;
import java.util.OptionalInt;
import java.util.function.Function;
import org.reactivestreams.Publisher;
import org.springframework.data.redis.connection.ReactiveRedisConnectionFactory;
import org.springframework.data.redis.connection.stream.Consumer;
import org.springframework.data.redis.connection.stream.MapRecord;
import org.springframework.data.redis.connection.stream.ObjectRecord;
import org.springframework.data.redis.connection.stream.ReadOffset;
import org.springframework.data.redis.connection.stream.Record;
import org.springframework.data.redis.connection.stream.StreamOffset;
import org.springframework.data.redis.hash.HashMapper;
import org.springframework.data.redis.hash.ObjectHashMapper;
import org.springframework.data.redis.serializer.RedisSerializationContext;
import org.springframework.data.redis.serializer.RedisSerializationContext.SerializationPair;
import org.springframework.data.redis.serializer.StringRedisSerializer;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
/**
* A receiver to consume Redis Streams using reactive infrastructure.
*
* Once created, a {@link StreamReceiver} can subscribe to a Redis Stream and consume incoming {@link Record records}.
* Consider a {@link Flux} of {@link Record} infinite. Cancelling the {@link org.reactivestreams.Subscription}
* eventually terminates the background polling. Records are converted using {@link SerializationPair key and value
* serializers} to support various serialization strategies.
* {@link StreamReceiver} supports three modes of stream consumption:
*
* - Standalone
* - Using a {@link Consumer} with external
* {@link org.springframework.data.redis.core.ReactiveStreamOperations#acknowledge(Object, String, String...)
* acknowledge}
* - Using a {@link Consumer} with auto-acknowledge
*
* Reading from a stream requires polling and a strategy to advance stream offsets. Depending on the initial
* {@link ReadOffset}, {@link StreamReceiver} applies an individual strategy to obtain the next {@link ReadOffset}:
*
* Standalone
*
* - {@link ReadOffset#from(String)} Offset using a particular record Id: Start with the given offset and use the last
* seen {@link Record#getId() record Id}.
* - {@link ReadOffset#lastConsumed()} Last consumed: Start with the latest offset ({@code $}) and use the last seen
* {@link Record#getId() record Id}.
* - {@link ReadOffset#latest()} Latest offset: Start with the latest offset ({@code $}) and use latest offset
* ({@code $}) for subsequent reads.
*
*
* Using {@link Consumer}
*
* - {@link ReadOffset#from(String)} Offset using a particular record Id: Start with the given offset and use the last
* seen {@link Record#getId() record Id}.
* - {@link ReadOffset#lastConsumed()} Last consumed: Start with the last consumed record by the consumer ({@code >})
* and use the last consumed record by the consumer ({@code >}) for subsequent reads.
* - {@link ReadOffset#latest()} Latest offset: Start with the latest offset ({@code $}) and use latest offset
* ({@code $}) for subsequent reads.
*
* Note: Using {@link ReadOffset#latest()} bears the chance of dropped records, as records can arrive
* while polling is suspended. Use a record Id as offset or {@link ReadOffset#lastConsumed()} to minimize the chance of
* record loss.
*
* {@link StreamReceiver} propagates errors during stream reads and deserialization as terminal error signal by default.
* Configuring a {@link StreamReceiverOptions#getResumeFunction() resume function} allows conditional resumption by
* dropping the record or by propagating the error to terminate the subscription.
*
* See the following example code how to use {@link StreamReceiver}:
*
*
* ReactiveRedisConnectionFactory factory = …;
*
* StreamReceiver&lt;String, MapRecord&lt;String, String, String&gt;&gt; receiver = StreamReceiver.create(factory);
* Flux&lt;MapRecord&lt;String, String, String&gt;&gt; records = receiver.receive(StreamOffset.fromStart("my-stream"));
*
* records.doOnNext(record -&gt; …);
*
*
* @author Mark Paluch
* @author Eddie McDaniel
* @param <K> Stream key and Stream field type.
* @param <V> Stream value type.
* @since 2.2
* @see StreamReceiverOptions#builder()
* @see org.springframework.data.redis.core.ReactiveStreamOperations
* @see ReactiveRedisConnectionFactory
* @see StreamMessageListenerContainer
*/
public interface StreamReceiver> {
/**
* Create a new {@link StreamReceiver} using {@link StringRedisSerializer string serializers} given
* {@link ReactiveRedisConnectionFactory}.
*
* @param connectionFactory must not be {@literal null}.
* @return the new {@link StreamReceiver}.
*/
static StreamReceiver> create(
ReactiveRedisConnectionFactory connectionFactory) {
Assert.notNull(connectionFactory, "ReactiveRedisConnectionFactory must not be null!");
SerializationPair serializationPair = SerializationPair.fromSerializer(StringRedisSerializer.UTF_8);
return create(connectionFactory, StreamReceiverOptions.builder().serializer(serializationPair).build());
}
/**
* Create a new {@link StreamReceiver} given {@link ReactiveRedisConnectionFactory} and {@link StreamReceiverOptions}.
*
* @param connectionFactory must not be {@literal null}.
* @param options must not be {@literal null}.
* @return the new {@link StreamReceiver}.
*/
static > StreamReceiver create(ReactiveRedisConnectionFactory connectionFactory,
StreamReceiverOptions options) {
Assert.notNull(connectionFactory, "ReactiveRedisConnectionFactory must not be null!");
Assert.notNull(options, "StreamReceiverOptions must not be null!");
return new DefaultStreamReceiver<>(connectionFactory, options);
}
/**
 * Starts a Redis Stream consumer that consumes {@link Record records} from the {@link StreamOffset stream}. Records
 * are consumed from Redis and delivered on the returned {@link Flux} when requests are made on the Flux. The receiver
 * is closed when the returned {@link Flux} terminates.
 *
 * Every record must be acknowledged using
 * {@link org.springframework.data.redis.connection.ReactiveStreamCommands#xAck(ByteBuffer, String, String...)}
 *
 * @param streamOffset the stream along its offset, must not be {@literal null}.
 * @return Flux of inbound {@link Record}s.
 * @see StreamOffset#create(Object, ReadOffset)
 */
Flux<V> receive(StreamOffset<K> streamOffset);
/**
 * Starts a Redis Stream consumer that consumes {@link Record records} from the {@link StreamOffset stream}. Records
 * are consumed from Redis and delivered on the returned {@link Flux} when requests are made on the Flux. The receiver
 * is closed when the returned {@link Flux} terminates.
 *
 * Every record is acknowledged when received.
 *
 * @param consumer consumer group, must not be {@literal null}.
 * @param streamOffset the stream along its offset, must not be {@literal null}.
 * @return Flux of inbound {@link Record}s.
 * @see StreamOffset#create(Object, ReadOffset)
 * @see ReadOffset#lastConsumed()
 */
Flux<V> receiveAutoAck(Consumer consumer, StreamOffset<K> streamOffset);
/**
 * Starts a Redis Stream consumer that consumes {@link Record records} from the {@link StreamOffset stream}. Records
 * are consumed from Redis and delivered on the returned {@link Flux} when requests are made on the Flux. The receiver
 * is closed when the returned {@link Flux} terminates.
 *
 * Every record must be acknowledged using
 * {@link org.springframework.data.redis.core.ReactiveStreamOperations#acknowledge(Object, String, String...)} after
 * processing.
 *
 * @param consumer consumer group, must not be {@literal null}.
 * @param streamOffset the stream along its offset, must not be {@literal null}.
 * @return Flux of inbound {@link Record}s.
 * @see StreamOffset#create(Object, ReadOffset)
 * @see ReadOffset#lastConsumed()
 */
Flux<V> receive(Consumer consumer, StreamOffset<K> streamOffset);
/**
* Options for {@link StreamReceiver}.
*
* @param Stream key and Stream field type.
* @param Stream value type.
* @see StreamReceiverOptionsBuilder
*/
class StreamReceiverOptions> {
private final Duration pollTimeout;
private final @Nullable Integer batchSize;
private final Function> resumeFunction;
private final SerializationPair keySerializer;
private final SerializationPair