
org.apache.kafka.streams.kstream.Consumed

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.streams.kstream;

import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.processor.TimestampExtractor;

import java.util.Objects;

/**
 * The {@code Consumed} class is used to define the optional parameters when using {@link StreamsBuilder} to
 * build instances of {@link KStream}, {@link KTable}, and {@link GlobalKTable}.
 * <p>
 * For example, you can read a topic as {@link KStream} with a custom timestamp extractor and specify the corresponding
 * key and value serdes like:
 * <pre>{@code
 * StreamsBuilder builder = new StreamsBuilder();
 * KStream<String, Long> stream = builder.stream(
 *   "topicName",
 *   Consumed.with(Serdes.String(), Serdes.Long())
 *           .withTimestampExtractor(new LogAndSkipOnInvalidTimestamp()));
 * }</pre>
 * Similarly, you can read a topic as {@link KTable} with a custom {@code auto.offset.reset} configuration and force a
 * state store {@link org.apache.kafka.streams.kstream.Materialized materialization} to access the content via
 * interactive queries:
 * <pre>{@code
 * StreamsBuilder builder = new StreamsBuilder();
 * KTable<Integer, Integer> table = builder.table(
 *   "topicName",
 *   Consumed.with(AutoOffsetReset.LATEST),
 *   Materialized.as("queryable-store-name"));
 * }</pre>
 *
 * @param <K> type of record key
 * @param <V> type of record value
 */
public class Consumed<K, V> implements NamedOperation<Consumed<K, V>> {

    protected Serde<K> keySerde;
    protected Serde<V> valueSerde;
    protected TimestampExtractor timestampExtractor;
    protected Topology.AutoOffsetReset resetPolicy;
    protected String processorName;

    private Consumed(final Serde<K> keySerde,
                     final Serde<V> valueSerde,
                     final TimestampExtractor timestampExtractor,
                     final Topology.AutoOffsetReset resetPolicy,
                     final String processorName) {
        this.keySerde = keySerde;
        this.valueSerde = valueSerde;
        this.timestampExtractor = timestampExtractor;
        this.resetPolicy = resetPolicy;
        this.processorName = processorName;
    }

    /**
     * Create an instance of {@link Consumed} from an existing instance.
     *
     * @param consumed the instance of {@link Consumed} to copy
     */
    protected Consumed(final Consumed<K, V> consumed) {
        this(consumed.keySerde,
             consumed.valueSerde,
             consumed.timestampExtractor,
             consumed.resetPolicy,
             consumed.processorName);
    }

    /**
     * Create an instance of {@link Consumed} with the supplied arguments. {@code null} values are acceptable.
     *
     * @param keySerde           the key serde. If {@code null} the default key serde from config will be used
     * @param valueSerde         the value serde. If {@code null} the default value serde from config will be used
     * @param timestampExtractor the timestamp extractor to be used. If {@code null} the default timestamp extractor from config will be used
     * @param resetPolicy        the offset reset policy to be used. If {@code null} the default reset policy from config will be used
     * @param <K>                key type
     * @param <V>                value type
     * @return a new instance of {@link Consumed}
     */
    public static <K, V> Consumed<K, V> with(final Serde<K> keySerde,
                                             final Serde<V> valueSerde,
                                             final TimestampExtractor timestampExtractor,
                                             final Topology.AutoOffsetReset resetPolicy) {
        return new Consumed<>(keySerde, valueSerde, timestampExtractor, resetPolicy, null);
    }

    /**
     * Create an instance of {@link Consumed} with key and value {@link Serde}s.
     *
     * @param keySerde   the key serde. If {@code null} the default key serde from config will be used
     * @param valueSerde the value serde. If {@code null} the default value serde from config will be used
     * @param <K>        key type
     * @param <V>        value type
     * @return a new instance of {@link Consumed}
     */
    public static <K, V> Consumed<K, V> with(final Serde<K> keySerde,
                                             final Serde<V> valueSerde) {
        return new Consumed<>(keySerde, valueSerde, null, null, null);
    }

    /**
     * Create an instance of {@link Consumed} with a {@link TimestampExtractor}.
     *
     * @param timestampExtractor the timestamp extractor to be used. If {@code null} the default timestamp extractor from config will be used
     * @param <K>                key type
     * @param <V>                value type
     * @return a new instance of {@link Consumed}
     */
    public static <K, V> Consumed<K, V> with(final TimestampExtractor timestampExtractor) {
        return new Consumed<>(null, null, timestampExtractor, null, null);
    }

    /**
     * Create an instance of {@link Consumed} with a {@link org.apache.kafka.streams.Topology.AutoOffsetReset Topology.AutoOffsetReset}.
     *
     * @param resetPolicy the offset reset policy to be used. If {@code null} the default reset policy from config will be used
     * @param <K>         key type
     * @param <V>         value type
     * @return a new instance of {@link Consumed}
     */
    public static <K, V> Consumed<K, V> with(final Topology.AutoOffsetReset resetPolicy) {
        return new Consumed<>(null, null, null, resetPolicy, null);
    }

    /**
     * Create an instance of {@link Consumed} with provided processor name.
     *
     * @param processorName the processor name to be used. If {@code null} a default processor name will be generated
     * @param <K>           key type
     * @param <V>           value type
     * @return a new instance of {@link Consumed}
     */
    public static <K, V> Consumed<K, V> as(final String processorName) {
        return new Consumed<>(null, null, null, null, processorName);
    }

    /**
     * Configure the instance of {@link Consumed} with a key {@link Serde}.
     *
     * @param keySerde the key serde. If {@code null} the default key serde from config will be used
     * @return this
     */
    public Consumed<K, V> withKeySerde(final Serde<K> keySerde) {
        this.keySerde = keySerde;
        return this;
    }

    /**
     * Configure the instance of {@link Consumed} with a value {@link Serde}.
     *
     * @param valueSerde the value serde. If {@code null} the default value serde from config will be used
     * @return this
     */
    public Consumed<K, V> withValueSerde(final Serde<V> valueSerde) {
        this.valueSerde = valueSerde;
        return this;
    }

    /**
     * Configure the instance of {@link Consumed} with a {@link TimestampExtractor}.
     *
     * @param timestampExtractor the timestamp extractor to be used. If {@code null} the default timestamp extractor from config will be used
     * @return this
     */
    public Consumed<K, V> withTimestampExtractor(final TimestampExtractor timestampExtractor) {
        this.timestampExtractor = timestampExtractor;
        return this;
    }

    /**
     * Configure the instance of {@link Consumed} with a {@link org.apache.kafka.streams.Topology.AutoOffsetReset Topology.AutoOffsetReset}.
     *
     * @param resetPolicy the offset reset policy to be used. If {@code null} the default reset policy from config will be used
     * @return this
     */
    public Consumed<K, V> withOffsetResetPolicy(final Topology.AutoOffsetReset resetPolicy) {
        this.resetPolicy = resetPolicy;
        return this;
    }

    /**
     * Configure the instance of {@link Consumed} with a processor name.
     *
     * @param processorName the processor name to be used. If {@code null} a default processor name will be generated
     * @return this
     */
    @Override
    public Consumed<K, V> withName(final String processorName) {
        this.processorName = processorName;
        return this;
    }

    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final Consumed<?, ?> consumed = (Consumed<?, ?>) o;
        return Objects.equals(keySerde, consumed.keySerde)
            && Objects.equals(valueSerde, consumed.valueSerde)
            && Objects.equals(timestampExtractor, consumed.timestampExtractor)
            && resetPolicy == consumed.resetPolicy;
    }

    @Override
    public int hashCode() {
        return Objects.hash(keySerde, valueSerde, timestampExtractor, resetPolicy);
    }
}
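
For reference, here is a minimal, self-contained sketch of how the class above is typically used when wiring a topology, combining all four optional parameters plus a processor name on a single {@code Consumed} instance via the fluent setters. The topic name and processor name are hypothetical placeholders, and the sketch assumes a Kafka Streams version matching this source, where {@code Topology.AutoOffsetReset} and {@code withName(...)} are available:

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.processor.LogAndSkipOnInvalidTimestamp;

public class ConsumedUsageSketch {

    public static Topology buildTopology() {
        final StreamsBuilder builder = new StreamsBuilder();

        // Chain the optional consumption parameters on one Consumed instance:
        // key/value serdes, timestamp extractor, reset policy, and processor name.
        final KStream<String, Long> stream = builder.stream(
            "input-topic",                                          // hypothetical topic name
            Consumed.with(Serdes.String(), Serdes.Long())
                    .withTimestampExtractor(new LogAndSkipOnInvalidTimestamp())
                    .withOffsetResetPolicy(Topology.AutoOffsetReset.EARLIEST)
                    .withName("input-source"));                     // hypothetical processor name

        stream.foreach((key, value) -> System.out.println(key + " -> " + value));
        return builder.build();
    }
}

Note that, as the source shows, the {@code with*} setters mutate the receiver and return {@code this} rather than a copy, so a {@code Consumed} instance should not be shared across multiple {@code builder.stream(...)} or {@code builder.table(...)} calls that need different settings.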



