
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package software.amazon.awssdk.services.databasemigration.model;
import java.beans.Transient;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.Function;
import software.amazon.awssdk.annotations.Generated;
import software.amazon.awssdk.core.SdkField;
import software.amazon.awssdk.core.SdkPojo;
import software.amazon.awssdk.core.protocol.MarshallLocation;
import software.amazon.awssdk.core.protocol.MarshallingType;
import software.amazon.awssdk.core.traits.LocationTrait;
import software.amazon.awssdk.utils.ToString;
import software.amazon.awssdk.utils.builder.CopyableBuilder;
import software.amazon.awssdk.utils.builder.ToCopyableBuilder;
/**
*
* Provides information that describes an Apache Kafka endpoint. This information includes the output format of records
* applied to the endpoint and details of transaction and control table data information.
*
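* A minimal usage sketch of building these settings (the broker address and topic below are placeholder values, not
* defaults):
*
* <pre>{@code
* KafkaSettings settings = KafkaSettings.builder()
*         .broker("ec2-12-345-678-901.compute-1.amazonaws.com:2345") // comma-separated broker list (placeholder)
*         .topic("my-dms-topic")                                     // placeholder topic name
*         .messageFormat(MessageFormatValue.JSON)
*         .includeTransactionDetails(true)
*         .build();
* }</pre>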
*/
@Generated("software.amazon.awssdk:codegen")
public final class KafkaSettings implements SdkPojo, Serializable, ToCopyableBuilder<KafkaSettings.Builder, KafkaSettings> {
private static final SdkField<String> BROKER_FIELD = SdkField.<String> builder(MarshallingType.STRING).memberName("Broker")
.getter(getter(KafkaSettings::broker)).setter(setter(Builder::broker))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("Broker").build()).build();
private static final SdkField<String> TOPIC_FIELD = SdkField.<String> builder(MarshallingType.STRING).memberName("Topic")
.getter(getter(KafkaSettings::topic)).setter(setter(Builder::topic))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("Topic").build()).build();
private static final SdkField<String> MESSAGE_FORMAT_FIELD = SdkField.<String> builder(MarshallingType.STRING)
.memberName("MessageFormat").getter(getter(KafkaSettings::messageFormatAsString))
.setter(setter(Builder::messageFormat))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("MessageFormat").build()).build();
private static final SdkField<Boolean> INCLUDE_TRANSACTION_DETAILS_FIELD = SdkField
.<Boolean> builder(MarshallingType.BOOLEAN).memberName("IncludeTransactionDetails")
.getter(getter(KafkaSettings::includeTransactionDetails)).setter(setter(Builder::includeTransactionDetails))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("IncludeTransactionDetails").build())
.build();
private static final SdkField<Boolean> INCLUDE_PARTITION_VALUE_FIELD = SdkField.<Boolean> builder(MarshallingType.BOOLEAN)
.memberName("IncludePartitionValue").getter(getter(KafkaSettings::includePartitionValue))
.setter(setter(Builder::includePartitionValue))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("IncludePartitionValue").build())
.build();
private static final SdkField<Boolean> PARTITION_INCLUDE_SCHEMA_TABLE_FIELD = SdkField
.<Boolean> builder(MarshallingType.BOOLEAN)
.memberName("PartitionIncludeSchemaTable")
.getter(getter(KafkaSettings::partitionIncludeSchemaTable))
.setter(setter(Builder::partitionIncludeSchemaTable))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("PartitionIncludeSchemaTable")
.build()).build();
private static final SdkField<Boolean> INCLUDE_TABLE_ALTER_OPERATIONS_FIELD = SdkField
.<Boolean> builder(MarshallingType.BOOLEAN)
.memberName("IncludeTableAlterOperations")
.getter(getter(KafkaSettings::includeTableAlterOperations))
.setter(setter(Builder::includeTableAlterOperations))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("IncludeTableAlterOperations")
.build()).build();
private static final SdkField<Boolean> INCLUDE_CONTROL_DETAILS_FIELD = SdkField.<Boolean> builder(MarshallingType.BOOLEAN)
.memberName("IncludeControlDetails").getter(getter(KafkaSettings::includeControlDetails))
.setter(setter(Builder::includeControlDetails))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("IncludeControlDetails").build())
.build();
private static final SdkField<Integer> MESSAGE_MAX_BYTES_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
.memberName("MessageMaxBytes").getter(getter(KafkaSettings::messageMaxBytes))
.setter(setter(Builder::messageMaxBytes))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("MessageMaxBytes").build()).build();
private static final SdkField<Boolean> INCLUDE_NULL_AND_EMPTY_FIELD = SdkField.<Boolean> builder(MarshallingType.BOOLEAN)
.memberName("IncludeNullAndEmpty").getter(getter(KafkaSettings::includeNullAndEmpty))
.setter(setter(Builder::includeNullAndEmpty))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("IncludeNullAndEmpty").build())
.build();
private static final SdkField<String> SECURITY_PROTOCOL_FIELD = SdkField.<String> builder(MarshallingType.STRING)
.memberName("SecurityProtocol").getter(getter(KafkaSettings::securityProtocolAsString))
.setter(setter(Builder::securityProtocol))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("SecurityProtocol").build()).build();
private static final SdkField<String> SSL_CLIENT_CERTIFICATE_ARN_FIELD = SdkField.<String> builder(MarshallingType.STRING)
.memberName("SslClientCertificateArn").getter(getter(KafkaSettings::sslClientCertificateArn))
.setter(setter(Builder::sslClientCertificateArn))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("SslClientCertificateArn").build())
.build();
private static final SdkField<String> SSL_CLIENT_KEY_ARN_FIELD = SdkField.<String> builder(MarshallingType.STRING)
.memberName("SslClientKeyArn").getter(getter(KafkaSettings::sslClientKeyArn))
.setter(setter(Builder::sslClientKeyArn))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("SslClientKeyArn").build()).build();
private static final SdkField<String> SSL_CLIENT_KEY_PASSWORD_FIELD = SdkField.<String> builder(MarshallingType.STRING)
.memberName("SslClientKeyPassword").getter(getter(KafkaSettings::sslClientKeyPassword))
.setter(setter(Builder::sslClientKeyPassword))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("SslClientKeyPassword").build())
.build();
private static final SdkField<String> SSL_CA_CERTIFICATE_ARN_FIELD = SdkField.<String> builder(MarshallingType.STRING)
.memberName("SslCaCertificateArn").getter(getter(KafkaSettings::sslCaCertificateArn))
.setter(setter(Builder::sslCaCertificateArn))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("SslCaCertificateArn").build())
.build();
private static final SdkField<String> SASL_USERNAME_FIELD = SdkField.<String> builder(MarshallingType.STRING)
.memberName("SaslUsername").getter(getter(KafkaSettings::saslUsername)).setter(setter(Builder::saslUsername))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("SaslUsername").build()).build();
private static final SdkField<String> SASL_PASSWORD_FIELD = SdkField.<String> builder(MarshallingType.STRING)
.memberName("SaslPassword").getter(getter(KafkaSettings::saslPassword)).setter(setter(Builder::saslPassword))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("SaslPassword").build()).build();
private static final SdkField<Boolean> NO_HEX_PREFIX_FIELD = SdkField.<Boolean> builder(MarshallingType.BOOLEAN)
.memberName("NoHexPrefix").getter(getter(KafkaSettings::noHexPrefix)).setter(setter(Builder::noHexPrefix))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("NoHexPrefix").build()).build();
private static final List<SdkField<?>> SDK_FIELDS = Collections.unmodifiableList(Arrays.asList(BROKER_FIELD, TOPIC_FIELD,
MESSAGE_FORMAT_FIELD, INCLUDE_TRANSACTION_DETAILS_FIELD, INCLUDE_PARTITION_VALUE_FIELD,
PARTITION_INCLUDE_SCHEMA_TABLE_FIELD, INCLUDE_TABLE_ALTER_OPERATIONS_FIELD, INCLUDE_CONTROL_DETAILS_FIELD,
MESSAGE_MAX_BYTES_FIELD, INCLUDE_NULL_AND_EMPTY_FIELD, SECURITY_PROTOCOL_FIELD, SSL_CLIENT_CERTIFICATE_ARN_FIELD,
SSL_CLIENT_KEY_ARN_FIELD, SSL_CLIENT_KEY_PASSWORD_FIELD, SSL_CA_CERTIFICATE_ARN_FIELD, SASL_USERNAME_FIELD,
SASL_PASSWORD_FIELD, NO_HEX_PREFIX_FIELD));
private static final long serialVersionUID = 1L;
private final String broker;
private final String topic;
private final String messageFormat;
private final Boolean includeTransactionDetails;
private final Boolean includePartitionValue;
private final Boolean partitionIncludeSchemaTable;
private final Boolean includeTableAlterOperations;
private final Boolean includeControlDetails;
private final Integer messageMaxBytes;
private final Boolean includeNullAndEmpty;
private final String securityProtocol;
private final String sslClientCertificateArn;
private final String sslClientKeyArn;
private final String sslClientKeyPassword;
private final String sslCaCertificateArn;
private final String saslUsername;
private final String saslPassword;
private final Boolean noHexPrefix;
private KafkaSettings(BuilderImpl builder) {
this.broker = builder.broker;
this.topic = builder.topic;
this.messageFormat = builder.messageFormat;
this.includeTransactionDetails = builder.includeTransactionDetails;
this.includePartitionValue = builder.includePartitionValue;
this.partitionIncludeSchemaTable = builder.partitionIncludeSchemaTable;
this.includeTableAlterOperations = builder.includeTableAlterOperations;
this.includeControlDetails = builder.includeControlDetails;
this.messageMaxBytes = builder.messageMaxBytes;
this.includeNullAndEmpty = builder.includeNullAndEmpty;
this.securityProtocol = builder.securityProtocol;
this.sslClientCertificateArn = builder.sslClientCertificateArn;
this.sslClientKeyArn = builder.sslClientKeyArn;
this.sslClientKeyPassword = builder.sslClientKeyPassword;
this.sslCaCertificateArn = builder.sslCaCertificateArn;
this.saslUsername = builder.saslUsername;
this.saslPassword = builder.saslPassword;
this.noHexPrefix = builder.noHexPrefix;
}
/**
*
* A comma-separated list of one or more broker locations in your Kafka cluster that host your Kafka instance.
* Specify each broker location in the form <code>broker-hostname-or-ip:port</code>. For example,
* <code>"ec2-12-345-678-901.compute-1.amazonaws.com:2345"</code>. For more information and examples of specifying a
* list of broker locations, see Using Apache Kafka as a target for Database Migration Service in the Database
* Migration Service User Guide.
*
*
* @return A comma-separated list of one or more broker locations in your Kafka cluster that host your Kafka
* instance. Specify each broker location in the form <code>broker-hostname-or-ip:port</code>. For example,
* <code>"ec2-12-345-678-901.compute-1.amazonaws.com:2345"</code>. For more information and examples of
* specifying a list of broker locations, see Using Apache Kafka as a target for Database Migration Service
* in the Database Migration Service User Guide.
*/
public final String broker() {
return broker;
}
/**
*
* The topic to which you migrate the data. If you don't specify a topic, DMS specifies
* <code>"kafka-default-topic"</code> as the migration topic.
*
*
* @return The topic to which you migrate the data. If you don't specify a topic, DMS specifies
* <code>"kafka-default-topic"</code> as the migration topic.
*/
public final String topic() {
return topic;
}
/**
*
* The output format for the records created on the endpoint. The message format is <code>JSON</code> (default) or
* <code>JSON_UNFORMATTED</code> (a single line with no tab).
*
*
* If the service returns an enum value that is not available in the current SDK version, {@link #messageFormat}
* will return {@link MessageFormatValue#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available
* from {@link #messageFormatAsString}.
*
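* A short, illustrative sketch of handling an unrecognized value (assuming a {@code KafkaSettings} instance named
* {@code settings}):
*
* <pre>{@code
* MessageFormatValue format = settings.messageFormat();
* if (format == MessageFormatValue.UNKNOWN_TO_SDK_VERSION) {
*     String raw = settings.messageFormatAsString(); // raw value as returned by the service
* }
* }</pre>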
*
* @return The output format for the records created on the endpoint. The message format is <code>JSON</code>
* (default) or <code>JSON_UNFORMATTED</code> (a single line with no tab).
* @see MessageFormatValue
*/
public final MessageFormatValue messageFormat() {
return MessageFormatValue.fromValue(messageFormat);
}
/**
*
* The output format for the records created on the endpoint. The message format is <code>JSON</code> (default) or
* <code>JSON_UNFORMATTED</code> (a single line with no tab).
*
*
* If the service returns an enum value that is not available in the current SDK version, {@link #messageFormat}
* will return {@link MessageFormatValue#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available
* from {@link #messageFormatAsString}.
*
*
* @return The output format for the records created on the endpoint. The message format is <code>JSON</code>
* (default) or <code>JSON_UNFORMATTED</code> (a single line with no tab).
* @see MessageFormatValue
*/
public final String messageFormatAsString() {
return messageFormat;
}
/**
*
* Provides detailed transaction information from the source database. This information includes a commit timestamp,
* a log position, and values for <code>transaction_id</code>, previous <code>transaction_id</code>, and
* <code>transaction_record_id</code> (the record offset within a transaction). The default is <code>false</code>.
*
*
* @return Provides detailed transaction information from the source database. This information includes a commit
* timestamp, a log position, and values for <code>transaction_id</code>, previous
* <code>transaction_id</code>, and <code>transaction_record_id</code> (the record offset within a
* transaction). The default is <code>false</code>.
*/
public final Boolean includeTransactionDetails() {
return includeTransactionDetails;
}
/**
*
* Shows the partition value within the Kafka message output unless the partition type is
* <code>schema-table-type</code>. The default is <code>false</code>.
*
*
* @return Shows the partition value within the Kafka message output unless the partition type is
* <code>schema-table-type</code>. The default is <code>false</code>.
*/
public final Boolean includePartitionValue() {
return includePartitionValue;
}
/**
*
* Prefixes schema and table names to partition values, when the partition type is <code>primary-key-type</code>.
* Doing this increases data distribution among Kafka partitions. For example, suppose that a SysBench schema has
* thousands of tables and each table has only limited range for a primary key. In this case, the same primary key
* is sent from thousands of tables to the same partition, which causes throttling. The default is
* <code>false</code>.
*
*
* @return Prefixes schema and table names to partition values, when the partition type is
* <code>primary-key-type</code>. Doing this increases data distribution among Kafka partitions. For
* example, suppose that a SysBench schema has thousands of tables and each table has only limited range for
* a primary key. In this case, the same primary key is sent from thousands of tables to the same partition,
* which causes throttling. The default is <code>false</code>.
*/
public final Boolean partitionIncludeSchemaTable() {
return partitionIncludeSchemaTable;
}
/**
*
* Includes any data definition language (DDL) operations that change the table in the control data, such as
* <code>rename-table</code>, <code>drop-table</code>, <code>add-column</code>, <code>drop-column</code>, and
* <code>rename-column</code>. The default is <code>false</code>.
*
*
* @return Includes any data definition language (DDL) operations that change the table in the control data, such as
* <code>rename-table</code>, <code>drop-table</code>, <code>add-column</code>, <code>drop-column</code>,
* and <code>rename-column</code>. The default is <code>false</code>.
*/
public final Boolean includeTableAlterOperations() {
return includeTableAlterOperations;
}
/**
*
* Shows detailed control information for table definition, column definition, and table and column changes in the
* Kafka message output. The default is <code>false</code>.
*
*
* @return Shows detailed control information for table definition, column definition, and table and column changes
* in the Kafka message output. The default is <code>false</code>.
*/
public final Boolean includeControlDetails() {
return includeControlDetails;
}
/**
*
* The maximum size in bytes for records created on the endpoint. The default is 1,000,000.
*
*
* @return The maximum size in bytes for records created on the endpoint. The default is 1,000,000.
*/
public final Integer messageMaxBytes() {
return messageMaxBytes;
}
/**
*
* Include NULL and empty columns for records migrated to the endpoint. The default is <code>false</code>.
*
*
* @return Include NULL and empty columns for records migrated to the endpoint. The default is <code>false</code>.
*/
public final Boolean includeNullAndEmpty() {
return includeNullAndEmpty;
}
/**
*
* Set secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include
* <code>ssl-encryption</code>, <code>ssl-authentication</code>, and <code>sasl-ssl</code>. <code>sasl-ssl</code>
* requires <code>SaslUsername</code> and <code>SaslPassword</code>.
*
*
* If the service returns an enum value that is not available in the current SDK version, {@link #securityProtocol}
* will return {@link KafkaSecurityProtocol#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is
* available from {@link #securityProtocolAsString}.
*
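* An illustrative configuration sketch for SASL-SSL (the user name and password are placeholder values):
*
* <pre>{@code
* KafkaSettings saslSettings = KafkaSettings.builder()
*         .securityProtocol(KafkaSecurityProtocol.SASL_SSL)
*         .saslUsername("example-user")
*         .saslPassword("example-password")
*         .build();
* }</pre>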
*
* @return Set secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include
* <code>ssl-encryption</code>, <code>ssl-authentication</code>, and <code>sasl-ssl</code>.
* <code>sasl-ssl</code> requires <code>SaslUsername</code> and <code>SaslPassword</code>.
* @see KafkaSecurityProtocol
*/
public final KafkaSecurityProtocol securityProtocol() {
return KafkaSecurityProtocol.fromValue(securityProtocol);
}
/**
*
* Set secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include
* <code>ssl-encryption</code>, <code>ssl-authentication</code>, and <code>sasl-ssl</code>. <code>sasl-ssl</code>
* requires <code>SaslUsername</code> and <code>SaslPassword</code>.
*
*
* If the service returns an enum value that is not available in the current SDK version, {@link #securityProtocol}
* will return {@link KafkaSecurityProtocol#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is
* available from {@link #securityProtocolAsString}.
*
*
* @return Set secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include
* <code>ssl-encryption</code>, <code>ssl-authentication</code>, and <code>sasl-ssl</code>.
* <code>sasl-ssl</code> requires <code>SaslUsername</code> and <code>SaslPassword</code>.
* @see KafkaSecurityProtocol
*/
public final String securityProtocolAsString() {
return securityProtocol;
}
/**
*
* The Amazon Resource Name (ARN) of the client certificate used to securely connect to a Kafka target endpoint.
*
*
* @return The Amazon Resource Name (ARN) of the client certificate used to securely connect to a Kafka target
* endpoint.
*/
public final String sslClientCertificateArn() {
return sslClientCertificateArn;
}
/**
*
* The Amazon Resource Name (ARN) for the client private key used to securely connect to a Kafka target endpoint.
*
*
* @return The Amazon Resource Name (ARN) for the client private key used to securely connect to a Kafka target
* endpoint.
*/
public final String sslClientKeyArn() {
return sslClientKeyArn;
}
/**
*
* The password for the client private key used to securely connect to a Kafka target endpoint.
*
*
* @return The password for the client private key used to securely connect to a Kafka target endpoint.
*/
public final String sslClientKeyPassword() {
return sslClientKeyPassword;
}
/**
*
* The Amazon Resource Name (ARN) for the private certificate authority (CA) cert that DMS uses to securely connect
* to your Kafka target endpoint.
*
*
* @return The Amazon Resource Name (ARN) for the private certificate authority (CA) cert that DMS uses to securely
* connect to your Kafka target endpoint.
*/
public final String sslCaCertificateArn() {
return sslCaCertificateArn;
}
/**
*
* The secure user name you created when you first set up your MSK cluster to validate a client identity and make an
* encrypted connection between server and client using SASL-SSL authentication.
*
*
* @return The secure user name you created when you first set up your MSK cluster to validate a client identity and
* make an encrypted connection between server and client using SASL-SSL authentication.
*/
public final String saslUsername() {
return saslUsername;
}
/**
*
* The secure password you created when you first set up your MSK cluster to validate a client identity and make an
* encrypted connection between server and client using SASL-SSL authentication.
*
*
* @return The secure password you created when you first set up your MSK cluster to validate a client identity and
* make an encrypted connection between server and client using SASL-SSL authentication.
*/
public final String saslPassword() {
return saslPassword;
}
/**
*
* Set this optional parameter to <code>true</code> to avoid adding a '0x' prefix to raw data in hexadecimal format.
* For example, by default, DMS adds a '0x' prefix to the LOB column type in hexadecimal format moving from an
* Oracle source to a Kafka target. Use the <code>NoHexPrefix</code> endpoint setting to enable migration of RAW
* data type columns without adding the '0x' prefix.
*
*
* @return Set this optional parameter to <code>true</code> to avoid adding a '0x' prefix to raw data in hexadecimal
* format. For example, by default, DMS adds a '0x' prefix to the LOB column type in hexadecimal format
* moving from an Oracle source to a Kafka target. Use the <code>NoHexPrefix</code> endpoint setting to
* enable migration of RAW data type columns without adding the '0x' prefix.
*/
public final Boolean noHexPrefix() {
return noHexPrefix;
}
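// Illustrative sketch: toBuilder() supports copy-and-modify of an existing instance, for example
// KafkaSettings updated = settings.toBuilder().topic("another-topic").build();
// ("settings" and "another-topic" are placeholder names used only for illustration).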
@Override
public Builder toBuilder() {
return new BuilderImpl(this);
}
public static Builder builder() {
return new BuilderImpl();
}
public static Class<? extends Builder> serializableBuilderClass() {
return BuilderImpl.class;
}
@Override
public final int hashCode() {
int hashCode = 1;
hashCode = 31 * hashCode + Objects.hashCode(broker());
hashCode = 31 * hashCode + Objects.hashCode(topic());
hashCode = 31 * hashCode + Objects.hashCode(messageFormatAsString());
hashCode = 31 * hashCode + Objects.hashCode(includeTransactionDetails());
hashCode = 31 * hashCode + Objects.hashCode(includePartitionValue());
hashCode = 31 * hashCode + Objects.hashCode(partitionIncludeSchemaTable());
hashCode = 31 * hashCode + Objects.hashCode(includeTableAlterOperations());
hashCode = 31 * hashCode + Objects.hashCode(includeControlDetails());
hashCode = 31 * hashCode + Objects.hashCode(messageMaxBytes());
hashCode = 31 * hashCode + Objects.hashCode(includeNullAndEmpty());
hashCode = 31 * hashCode + Objects.hashCode(securityProtocolAsString());
hashCode = 31 * hashCode + Objects.hashCode(sslClientCertificateArn());
hashCode = 31 * hashCode + Objects.hashCode(sslClientKeyArn());
hashCode = 31 * hashCode + Objects.hashCode(sslClientKeyPassword());
hashCode = 31 * hashCode + Objects.hashCode(sslCaCertificateArn());
hashCode = 31 * hashCode + Objects.hashCode(saslUsername());
hashCode = 31 * hashCode + Objects.hashCode(saslPassword());
hashCode = 31 * hashCode + Objects.hashCode(noHexPrefix());
return hashCode;
}
@Override
public final boolean equals(Object obj) {
return equalsBySdkFields(obj);
}
@Override
public final boolean equalsBySdkFields(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (!(obj instanceof KafkaSettings)) {
return false;
}
KafkaSettings other = (KafkaSettings) obj;
return Objects.equals(broker(), other.broker()) && Objects.equals(topic(), other.topic())
&& Objects.equals(messageFormatAsString(), other.messageFormatAsString())
&& Objects.equals(includeTransactionDetails(), other.includeTransactionDetails())
&& Objects.equals(includePartitionValue(), other.includePartitionValue())
&& Objects.equals(partitionIncludeSchemaTable(), other.partitionIncludeSchemaTable())
&& Objects.equals(includeTableAlterOperations(), other.includeTableAlterOperations())
&& Objects.equals(includeControlDetails(), other.includeControlDetails())
&& Objects.equals(messageMaxBytes(), other.messageMaxBytes())
&& Objects.equals(includeNullAndEmpty(), other.includeNullAndEmpty())
&& Objects.equals(securityProtocolAsString(), other.securityProtocolAsString())
&& Objects.equals(sslClientCertificateArn(), other.sslClientCertificateArn())
&& Objects.equals(sslClientKeyArn(), other.sslClientKeyArn())
&& Objects.equals(sslClientKeyPassword(), other.sslClientKeyPassword())
&& Objects.equals(sslCaCertificateArn(), other.sslCaCertificateArn())
&& Objects.equals(saslUsername(), other.saslUsername()) && Objects.equals(saslPassword(), other.saslPassword())
&& Objects.equals(noHexPrefix(), other.noHexPrefix());
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*/
@Override
public final String toString() {
return ToString.builder("KafkaSettings").add("Broker", broker()).add("Topic", topic())
.add("MessageFormat", messageFormatAsString()).add("IncludeTransactionDetails", includeTransactionDetails())
.add("IncludePartitionValue", includePartitionValue())
.add("PartitionIncludeSchemaTable", partitionIncludeSchemaTable())
.add("IncludeTableAlterOperations", includeTableAlterOperations())
.add("IncludeControlDetails", includeControlDetails()).add("MessageMaxBytes", messageMaxBytes())
.add("IncludeNullAndEmpty", includeNullAndEmpty()).add("SecurityProtocol", securityProtocolAsString())
.add("SslClientCertificateArn", sslClientCertificateArn()).add("SslClientKeyArn", sslClientKeyArn())
.add("SslClientKeyPassword", sslClientKeyPassword() == null ? null : "*** Sensitive Data Redacted ***")
.add("SslCaCertificateArn", sslCaCertificateArn()).add("SaslUsername", saslUsername())
.add("SaslPassword", saslPassword() == null ? null : "*** Sensitive Data Redacted ***")
.add("NoHexPrefix", noHexPrefix()).build();
}
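// Illustrative sketch: getValueForField looks up a member by its model name, for example
// Optional<String> topic = settings.getValueForField("Topic", String.class);
// ("settings" is a placeholder KafkaSettings instance used only for illustration).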
public final <T> Optional<T> getValueForField(String fieldName, Class<T> clazz) {
switch (fieldName) {
case "Broker":
return Optional.ofNullable(clazz.cast(broker()));
case "Topic":
return Optional.ofNullable(clazz.cast(topic()));
case "MessageFormat":
return Optional.ofNullable(clazz.cast(messageFormatAsString()));
case "IncludeTransactionDetails":
return Optional.ofNullable(clazz.cast(includeTransactionDetails()));
case "IncludePartitionValue":
return Optional.ofNullable(clazz.cast(includePartitionValue()));
case "PartitionIncludeSchemaTable":
return Optional.ofNullable(clazz.cast(partitionIncludeSchemaTable()));
case "IncludeTableAlterOperations":
return Optional.ofNullable(clazz.cast(includeTableAlterOperations()));
case "IncludeControlDetails":
return Optional.ofNullable(clazz.cast(includeControlDetails()));
case "MessageMaxBytes":
return Optional.ofNullable(clazz.cast(messageMaxBytes()));
case "IncludeNullAndEmpty":
return Optional.ofNullable(clazz.cast(includeNullAndEmpty()));
case "SecurityProtocol":
return Optional.ofNullable(clazz.cast(securityProtocolAsString()));
case "SslClientCertificateArn":
return Optional.ofNullable(clazz.cast(sslClientCertificateArn()));
case "SslClientKeyArn":
return Optional.ofNullable(clazz.cast(sslClientKeyArn()));
case "SslClientKeyPassword":
return Optional.ofNullable(clazz.cast(sslClientKeyPassword()));
case "SslCaCertificateArn":
return Optional.ofNullable(clazz.cast(sslCaCertificateArn()));
case "SaslUsername":
return Optional.ofNullable(clazz.cast(saslUsername()));
case "SaslPassword":
return Optional.ofNullable(clazz.cast(saslPassword()));
case "NoHexPrefix":
return Optional.ofNullable(clazz.cast(noHexPrefix()));
default:
return Optional.empty();
}
}
@Override
public final List<SdkField<?>> sdkFields() {
return SDK_FIELDS;
}
private static Function