All downloads are free. Search and download functionality uses the official Maven repository.

com.pulumi.azurenative.iotoperations.inputs.DataFlowEndpointKafkaArgs Maven / Gradle / Ivy

There is a newer version: 2.82.0
Show newest version
// *** WARNING: this file was generated by pulumi-java-gen. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***

package com.pulumi.azurenative.iotoperations.inputs;

import com.pulumi.azurenative.iotoperations.enums.DataFlowEndpointKafkaAcks;
import com.pulumi.azurenative.iotoperations.enums.DataFlowEndpointKafkaCompression;
import com.pulumi.azurenative.iotoperations.enums.DataFlowEndpointKafkaPartitionStrategy;
import com.pulumi.azurenative.iotoperations.enums.OperationalMode;
import com.pulumi.azurenative.iotoperations.inputs.DataFlowEndpointKafkaBatchingArgs;
import com.pulumi.azurenative.iotoperations.inputs.TlsPropertiesArgs;
import com.pulumi.core.Either;
import com.pulumi.core.Output;
import com.pulumi.core.annotations.Import;
import com.pulumi.core.internal.Codegen;
import com.pulumi.exceptions.MissingRequiredPropertyException;
import java.lang.String;
import java.util.Objects;
import java.util.Optional;
import javax.annotation.Nullable;


/**
 * Kafka endpoint properties
 * 
 */
public final class DataFlowEndpointKafkaArgs extends com.pulumi.resources.ResourceArgs {

    public static final DataFlowEndpointKafkaArgs Empty = new DataFlowEndpointKafkaArgs();

    /**
     * Batching configuration.
     * 
     */
    @Import(name="batching")
    private @Nullable Output batching;

    /**
     * @return Batching configuration.
     * 
     */
    public Optional> batching() {
        return Optional.ofNullable(this.batching);
    }

    /**
     * Compression. Can be none, gzip, lz4, or snappy. No effect if the endpoint is used as a source.
     * 
     */
    @Import(name="compression")
    private @Nullable Output> compression;

    /**
     * @return Compression. Can be none, gzip, lz4, or snappy. No effect if the endpoint is used as a source.
     * 
     */
    public Optional>> compression() {
        return Optional.ofNullable(this.compression);
    }

    /**
     * Consumer group ID.
     * 
     */
    @Import(name="consumerGroupId")
    private @Nullable Output consumerGroupId;

    /**
     * @return Consumer group ID.
     * 
     */
    public Optional> consumerGroupId() {
        return Optional.ofNullable(this.consumerGroupId);
    }

    /**
     * Copy Broker properties. No effect if the endpoint is used as a source or if the dataflow doesn't have an Broker source.
     * 
     */
    @Import(name="copyMqttProperties")
    private @Nullable Output> copyMqttProperties;

    /**
     * @return Copy Broker properties. No effect if the endpoint is used as a source or if the dataflow doesn't have an Broker source.
     * 
     */
    public Optional>> copyMqttProperties() {
        return Optional.ofNullable(this.copyMqttProperties);
    }

    /**
     * Kafka endpoint host.
     * 
     */
    @Import(name="host")
    private @Nullable Output host;

    /**
     * @return Kafka endpoint host.
     * 
     */
    public Optional> host() {
        return Optional.ofNullable(this.host);
    }

    /**
     * Kafka acks. Can be all, one, or zero. No effect if the endpoint is used as a source.
     * 
     */
    @Import(name="kafkaAcks")
    private @Nullable Output> kafkaAcks;

    /**
     * @return Kafka acks. Can be all, one, or zero. No effect if the endpoint is used as a source.
     * 
     */
    public Optional>> kafkaAcks() {
        return Optional.ofNullable(this.kafkaAcks);
    }

    /**
     * Partition handling strategy. Can be default or static. No effect if the endpoint is used as a source.
     * 
     */
    @Import(name="partitionStrategy")
    private @Nullable Output> partitionStrategy;

    /**
     * @return Partition handling strategy. Can be default or static. No effect if the endpoint is used as a source.
     * 
     */
    public Optional>> partitionStrategy() {
        return Optional.ofNullable(this.partitionStrategy);
    }

    /**
     * TLS configuration.
     * 
     */
    @Import(name="tls", required=true)
    private Output tls;

    /**
     * @return TLS configuration.
     * 
     */
    public Output tls() {
        return this.tls;
    }

    private DataFlowEndpointKafkaArgs() {}

    private DataFlowEndpointKafkaArgs(DataFlowEndpointKafkaArgs $) {
        this.batching = $.batching;
        this.compression = $.compression;
        this.consumerGroupId = $.consumerGroupId;
        this.copyMqttProperties = $.copyMqttProperties;
        this.host = $.host;
        this.kafkaAcks = $.kafkaAcks;
        this.partitionStrategy = $.partitionStrategy;
        this.tls = $.tls;
    }

    public static Builder builder() {
        return new Builder();
    }
    public static Builder builder(DataFlowEndpointKafkaArgs defaults) {
        return new Builder(defaults);
    }

    public static final class Builder {
        private DataFlowEndpointKafkaArgs $;

        public Builder() {
            $ = new DataFlowEndpointKafkaArgs();
        }

        public Builder(DataFlowEndpointKafkaArgs defaults) {
            $ = new DataFlowEndpointKafkaArgs(Objects.requireNonNull(defaults));
        }

        /**
         * @param batching Batching configuration.
         * 
         * @return builder
         * 
         */
        public Builder batching(@Nullable Output batching) {
            $.batching = batching;
            return this;
        }

        /**
         * @param batching Batching configuration.
         * 
         * @return builder
         * 
         */
        public Builder batching(DataFlowEndpointKafkaBatchingArgs batching) {
            return batching(Output.of(batching));
        }

        /**
         * @param compression Compression. Can be none, gzip, lz4, or snappy. No effect if the endpoint is used as a source.
         * 
         * @return builder
         * 
         */
        public Builder compression(@Nullable Output> compression) {
            $.compression = compression;
            return this;
        }

        /**
         * @param compression Compression. Can be none, gzip, lz4, or snappy. No effect if the endpoint is used as a source.
         * 
         * @return builder
         * 
         */
        public Builder compression(Either compression) {
            return compression(Output.of(compression));
        }

        /**
         * @param compression Compression. Can be none, gzip, lz4, or snappy. No effect if the endpoint is used as a source.
         * 
         * @return builder
         * 
         */
        public Builder compression(String compression) {
            return compression(Either.ofLeft(compression));
        }

        /**
         * @param compression Compression. Can be none, gzip, lz4, or snappy. No effect if the endpoint is used as a source.
         * 
         * @return builder
         * 
         */
        public Builder compression(DataFlowEndpointKafkaCompression compression) {
            return compression(Either.ofRight(compression));
        }

        /**
         * @param consumerGroupId Consumer group ID.
         * 
         * @return builder
         * 
         */
        public Builder consumerGroupId(@Nullable Output consumerGroupId) {
            $.consumerGroupId = consumerGroupId;
            return this;
        }

        /**
         * @param consumerGroupId Consumer group ID.
         * 
         * @return builder
         * 
         */
        public Builder consumerGroupId(String consumerGroupId) {
            return consumerGroupId(Output.of(consumerGroupId));
        }

        /**
         * @param copyMqttProperties Copy Broker properties. No effect if the endpoint is used as a source or if the dataflow doesn't have an Broker source.
         * 
         * @return builder
         * 
         */
        public Builder copyMqttProperties(@Nullable Output> copyMqttProperties) {
            $.copyMqttProperties = copyMqttProperties;
            return this;
        }

        /**
         * @param copyMqttProperties Copy Broker properties. No effect if the endpoint is used as a source or if the dataflow doesn't have an Broker source.
         * 
         * @return builder
         * 
         */
        public Builder copyMqttProperties(Either copyMqttProperties) {
            return copyMqttProperties(Output.of(copyMqttProperties));
        }

        /**
         * @param copyMqttProperties Copy Broker properties. No effect if the endpoint is used as a source or if the dataflow doesn't have an Broker source.
         * 
         * @return builder
         * 
         */
        public Builder copyMqttProperties(String copyMqttProperties) {
            return copyMqttProperties(Either.ofLeft(copyMqttProperties));
        }

        /**
         * @param copyMqttProperties Copy Broker properties. No effect if the endpoint is used as a source or if the dataflow doesn't have an Broker source.
         * 
         * @return builder
         * 
         */
        public Builder copyMqttProperties(OperationalMode copyMqttProperties) {
            return copyMqttProperties(Either.ofRight(copyMqttProperties));
        }

        /**
         * @param host Kafka endpoint host.
         * 
         * @return builder
         * 
         */
        public Builder host(@Nullable Output host) {
            $.host = host;
            return this;
        }

        /**
         * @param host Kafka endpoint host.
         * 
         * @return builder
         * 
         */
        public Builder host(String host) {
            return host(Output.of(host));
        }

        /**
         * @param kafkaAcks Kafka acks. Can be all, one, or zero. No effect if the endpoint is used as a source.
         * 
         * @return builder
         * 
         */
        public Builder kafkaAcks(@Nullable Output> kafkaAcks) {
            $.kafkaAcks = kafkaAcks;
            return this;
        }

        /**
         * @param kafkaAcks Kafka acks. Can be all, one, or zero. No effect if the endpoint is used as a source.
         * 
         * @return builder
         * 
         */
        public Builder kafkaAcks(Either kafkaAcks) {
            return kafkaAcks(Output.of(kafkaAcks));
        }

        /**
         * @param kafkaAcks Kafka acks. Can be all, one, or zero. No effect if the endpoint is used as a source.
         * 
         * @return builder
         * 
         */
        public Builder kafkaAcks(String kafkaAcks) {
            return kafkaAcks(Either.ofLeft(kafkaAcks));
        }

        /**
         * @param kafkaAcks Kafka acks. Can be all, one, or zero. No effect if the endpoint is used as a source.
         * 
         * @return builder
         * 
         */
        public Builder kafkaAcks(DataFlowEndpointKafkaAcks kafkaAcks) {
            return kafkaAcks(Either.ofRight(kafkaAcks));
        }

        /**
         * @param partitionStrategy Partition handling strategy. Can be default or static. No effect if the endpoint is used as a source.
         * 
         * @return builder
         * 
         */
        public Builder partitionStrategy(@Nullable Output> partitionStrategy) {
            $.partitionStrategy = partitionStrategy;
            return this;
        }

        /**
         * @param partitionStrategy Partition handling strategy. Can be default or static. No effect if the endpoint is used as a source.
         * 
         * @return builder
         * 
         */
        public Builder partitionStrategy(Either partitionStrategy) {
            return partitionStrategy(Output.of(partitionStrategy));
        }

        /**
         * @param partitionStrategy Partition handling strategy. Can be default or static. No effect if the endpoint is used as a source.
         * 
         * @return builder
         * 
         */
        public Builder partitionStrategy(String partitionStrategy) {
            return partitionStrategy(Either.ofLeft(partitionStrategy));
        }

        /**
         * @param partitionStrategy Partition handling strategy. Can be default or static. No effect if the endpoint is used as a source.
         * 
         * @return builder
         * 
         */
        public Builder partitionStrategy(DataFlowEndpointKafkaPartitionStrategy partitionStrategy) {
            return partitionStrategy(Either.ofRight(partitionStrategy));
        }

        /**
         * @param tls TLS configuration.
         * 
         * @return builder
         * 
         */
        public Builder tls(Output tls) {
            $.tls = tls;
            return this;
        }

        /**
         * @param tls TLS configuration.
         * 
         * @return builder
         * 
         */
        public Builder tls(TlsPropertiesArgs tls) {
            return tls(Output.of(tls));
        }

        public DataFlowEndpointKafkaArgs build() {
            $.compression = Codegen.stringProp("compression").left(DataFlowEndpointKafkaCompression.class).output().arg($.compression).def("None").getNullable();
            $.copyMqttProperties = Codegen.stringProp("copyMqttProperties").left(OperationalMode.class).output().arg($.copyMqttProperties).def("Disabled").getNullable();
            $.kafkaAcks = Codegen.stringProp("kafkaAcks").left(DataFlowEndpointKafkaAcks.class).output().arg($.kafkaAcks).def("All").getNullable();
            $.partitionStrategy = Codegen.stringProp("partitionStrategy").left(DataFlowEndpointKafkaPartitionStrategy.class).output().arg($.partitionStrategy).def("Default").getNullable();
            if ($.tls == null) {
                throw new MissingRequiredPropertyException("DataFlowEndpointKafkaArgs", "tls");
            }
            return $;
        }
    }

}




© 2015 - 2025 Weber Informatics LLC | Privacy Policy