// Code generated by smithy-kotlin-codegen. DO NOT EDIT!
package aws.sdk.kotlin.services.databasemigrationservice.model
import aws.smithy.kotlin.runtime.SdkDsl
/**
* Provides information that describes an Apache Kafka endpoint. This information includes the output format of records applied to the endpoint and details of transaction and control table data information.
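*
* A minimal usage sketch; the broker and topic values below are illustrative placeholders, not defaults:
*
* ```kotlin
* val settings = KafkaSettings {
*     broker = "b-1.example.kafka.us-east-1.amazonaws.com:9092" // placeholder broker endpoint
*     topic = "dms-migration-topic"                             // placeholder topic name
*     messageMaxBytes = 1_000_000
*     includeTransactionDetails = true
* }
* ```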
*/
public class KafkaSettings private constructor(builder: Builder) {
/**
* A comma-separated list of one or more broker locations in your Kafka cluster that host your Kafka instance. Specify each broker location in the form `broker-hostname-or-ip:port`. For example, `"ec2-12-345-678-901.compute-1.amazonaws.com:2345"`. For more information and examples of specifying a list of broker locations, see [Using Apache Kafka as a target for Database Migration Service](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Kafka.html) in the *Database Migration Service User Guide*.
*/
public val broker: kotlin.String? = builder.broker
/**
* Shows detailed control information for table definition, column definition, and table and column changes in the Kafka message output. The default is `false`.
*/
public val includeControlDetails: kotlin.Boolean? = builder.includeControlDetails
/**
* Includes NULL and empty columns for records migrated to the endpoint. The default is `false`.
*/
public val includeNullAndEmpty: kotlin.Boolean? = builder.includeNullAndEmpty
/**
* Shows the partition value within the Kafka message output unless the partition type is `schema-table-type`. The default is `false`.
*/
public val includePartitionValue: kotlin.Boolean? = builder.includePartitionValue
/**
* Includes any data definition language (DDL) operations that change the table in the control data, such as `rename-table`, `drop-table`, `add-column`, `drop-column`, and `rename-column`. The default is `false`.
*/
public val includeTableAlterOperations: kotlin.Boolean? = builder.includeTableAlterOperations
/**
* Provides detailed transaction information from the source database. This information includes a commit timestamp, a log position, and values for `transaction_id`, previous `transaction_id`, and `transaction_record_id` (the record offset within a transaction). The default is `false`.
*/
public val includeTransactionDetails: kotlin.Boolean? = builder.includeTransactionDetails
/**
* The output format for the records created on the endpoint. The message format is `JSON` (default) or `JSON_UNFORMATTED` (a single line with no tab).
*/
public val messageFormat: aws.sdk.kotlin.services.databasemigrationservice.model.MessageFormatValue? = builder.messageFormat
/**
* The maximum size in bytes for records created on the endpoint. The default is 1,000,000.
*/
public val messageMaxBytes: kotlin.Int? = builder.messageMaxBytes
/**
* Set this optional parameter to `true` to avoid adding a '0x' prefix to raw data in hexadecimal format. For example, by default, DMS adds a '0x' prefix to the LOB column type in hexadecimal format when migrating from an Oracle source to a Kafka target. Use the `NoHexPrefix` endpoint setting to enable migration of RAW data type columns without adding the '0x' prefix.
*/
public val noHexPrefix: kotlin.Boolean? = builder.noHexPrefix
/**
* Prefixes schema and table names to partition values, when the partition type is `primary-key-type`. Doing this increases data distribution among Kafka partitions. For example, suppose that a SysBench schema has thousands of tables and each table has only limited range for a primary key. In this case, the same primary key is sent from thousands of tables to the same partition, which causes throttling. The default is `false`.
*/
public val partitionIncludeSchemaTable: kotlin.Boolean? = builder.partitionIncludeSchemaTable
/**
* For SASL/SSL authentication, DMS supports the `SCRAM-SHA-512` mechanism by default. DMS versions 3.5.0 and later also support the `PLAIN` mechanism. To use the `PLAIN` mechanism, set this parameter to `PLAIN`.
*/
public val saslMechanism: aws.sdk.kotlin.services.databasemigrationservice.model.KafkaSaslMechanism? = builder.saslMechanism
/**
* The secure password that you created when you first set up your MSK cluster, used to validate a client identity and make an encrypted connection between server and client using SASL/SSL authentication.
*/
public val saslPassword: kotlin.String? = builder.saslPassword
/**
* The secure user name that you created when you first set up your MSK cluster, used to validate a client identity and make an encrypted connection between server and client using SASL/SSL authentication.
*/
public val saslUsername: kotlin.String? = builder.saslUsername
/**
* Sets a secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include `ssl-encryption`, `ssl-authentication`, and `sasl-ssl`. `sasl-ssl` requires `SaslUsername` and `SaslPassword`.
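*
* A minimal sasl-ssl sketch; the `SaslSsl` member name is assumed from the generated sealed class, and the credentials are placeholders:
*
* ```kotlin
* KafkaSettings {
*     securityProtocol = KafkaSecurityProtocol.SaslSsl // assumed member name for "sasl-ssl"
*     saslUsername = "dms-user"                        // placeholder
*     saslPassword = "dms-password"                    // placeholder
* }
* ```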
*/
public val securityProtocol: aws.sdk.kotlin.services.databasemigrationservice.model.KafkaSecurityProtocol? = builder.securityProtocol
/**
* The Amazon Resource Name (ARN) for the private certificate authority (CA) cert that DMS uses to securely connect to your Kafka target endpoint.
*/
public val sslCaCertificateArn: kotlin.String? = builder.sslCaCertificateArn
/**
* The Amazon Resource Name (ARN) of the client certificate used to securely connect to a Kafka target endpoint.
*/
public val sslClientCertificateArn: kotlin.String? = builder.sslClientCertificateArn
/**
* The Amazon Resource Name (ARN) for the client private key used to securely connect to a Kafka target endpoint.
*/
public val sslClientKeyArn: kotlin.String? = builder.sslClientKeyArn
/**
* The password for the client private key used to securely connect to a Kafka target endpoint.
*/
public val sslClientKeyPassword: kotlin.String? = builder.sslClientKeyPassword
/**
* Sets hostname verification for the certificate. This setting is supported in DMS version 3.5.1 and later.
*/
public val sslEndpointIdentificationAlgorithm: aws.sdk.kotlin.services.databasemigrationservice.model.KafkaSslEndpointIdentificationAlgorithm? = builder.sslEndpointIdentificationAlgorithm
/**
* The topic to which you migrate the data. If you don't specify a topic, DMS specifies `"kafka-default-topic"` as the migration topic.
*/
public val topic: kotlin.String? = builder.topic
/**
* Specifies whether to use the large integer value with Kafka.
*/
public val useLargeIntegerValue: kotlin.Boolean? = builder.useLargeIntegerValue
public companion object {
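/**
* Builds a [KafkaSettings] from the given DSL [block], for example `KafkaSettings { broker = "host:9092" }`.
*/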
public operator fun invoke(block: Builder.() -> kotlin.Unit): aws.sdk.kotlin.services.databasemigrationservice.model.KafkaSettings = Builder().apply(block).build()
}
override fun toString(): kotlin.String = buildString {
append("KafkaSettings(")
append("broker=$broker,")
append("includeControlDetails=$includeControlDetails,")
append("includeNullAndEmpty=$includeNullAndEmpty,")
append("includePartitionValue=$includePartitionValue,")
append("includeTableAlterOperations=$includeTableAlterOperations,")
append("includeTransactionDetails=$includeTransactionDetails,")
append("messageFormat=$messageFormat,")
append("messageMaxBytes=$messageMaxBytes,")
append("noHexPrefix=$noHexPrefix,")
append("partitionIncludeSchemaTable=$partitionIncludeSchemaTable,")
append("saslMechanism=$saslMechanism,")
append("saslPassword=*** Sensitive Data Redacted ***,")
append("saslUsername=$saslUsername,")
append("securityProtocol=$securityProtocol,")
append("sslCaCertificateArn=$sslCaCertificateArn,")
append("sslClientCertificateArn=$sslClientCertificateArn,")
append("sslClientKeyArn=$sslClientKeyArn,")
append("sslClientKeyPassword=*** Sensitive Data Redacted ***,")
append("sslEndpointIdentificationAlgorithm=$sslEndpointIdentificationAlgorithm,")
append("topic=$topic,")
append("useLargeIntegerValue=$useLargeIntegerValue")
append(")")
}
override fun hashCode(): kotlin.Int {
var result = broker?.hashCode() ?: 0
result = 31 * result + (includeControlDetails?.hashCode() ?: 0)
result = 31 * result + (includeNullAndEmpty?.hashCode() ?: 0)
result = 31 * result + (includePartitionValue?.hashCode() ?: 0)
result = 31 * result + (includeTableAlterOperations?.hashCode() ?: 0)
result = 31 * result + (includeTransactionDetails?.hashCode() ?: 0)
result = 31 * result + (messageFormat?.hashCode() ?: 0)
result = 31 * result + (messageMaxBytes ?: 0)
result = 31 * result + (noHexPrefix?.hashCode() ?: 0)
result = 31 * result + (partitionIncludeSchemaTable?.hashCode() ?: 0)
result = 31 * result + (saslMechanism?.hashCode() ?: 0)
result = 31 * result + (saslPassword?.hashCode() ?: 0)
result = 31 * result + (saslUsername?.hashCode() ?: 0)
result = 31 * result + (securityProtocol?.hashCode() ?: 0)
result = 31 * result + (sslCaCertificateArn?.hashCode() ?: 0)
result = 31 * result + (sslClientCertificateArn?.hashCode() ?: 0)
result = 31 * result + (sslClientKeyArn?.hashCode() ?: 0)
result = 31 * result + (sslClientKeyPassword?.hashCode() ?: 0)
result = 31 * result + (sslEndpointIdentificationAlgorithm?.hashCode() ?: 0)
result = 31 * result + (topic?.hashCode() ?: 0)
result = 31 * result + (useLargeIntegerValue?.hashCode() ?: 0)
return result
}
override fun equals(other: kotlin.Any?): kotlin.Boolean {
if (this === other) return true
if (other == null || this::class != other::class) return false
other as KafkaSettings
if (broker != other.broker) return false
if (includeControlDetails != other.includeControlDetails) return false
if (includeNullAndEmpty != other.includeNullAndEmpty) return false
if (includePartitionValue != other.includePartitionValue) return false
if (includeTableAlterOperations != other.includeTableAlterOperations) return false
if (includeTransactionDetails != other.includeTransactionDetails) return false
if (messageFormat != other.messageFormat) return false
if (messageMaxBytes != other.messageMaxBytes) return false
if (noHexPrefix != other.noHexPrefix) return false
if (partitionIncludeSchemaTable != other.partitionIncludeSchemaTable) return false
if (saslMechanism != other.saslMechanism) return false
if (saslPassword != other.saslPassword) return false
if (saslUsername != other.saslUsername) return false
if (securityProtocol != other.securityProtocol) return false
if (sslCaCertificateArn != other.sslCaCertificateArn) return false
if (sslClientCertificateArn != other.sslClientCertificateArn) return false
if (sslClientKeyArn != other.sslClientKeyArn) return false
if (sslClientKeyPassword != other.sslClientKeyPassword) return false
if (sslEndpointIdentificationAlgorithm != other.sslEndpointIdentificationAlgorithm) return false
if (topic != other.topic) return false
if (useLargeIntegerValue != other.useLargeIntegerValue) return false
return true
}
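/**
* Returns a copy of this object, applying any overrides set in the DSL [block]; properties not set in the block keep their current values.
*/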
public inline fun copy(block: Builder.() -> kotlin.Unit = {}): aws.sdk.kotlin.services.databasemigrationservice.model.KafkaSettings = Builder(this).apply(block).build()
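/**
* DSL builder for [KafkaSettings]. Every property is optional and defaults to `null`, which leaves the corresponding endpoint setting unset.
*/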
@SdkDsl
public class Builder {
/**
* A comma-separated list of one or more broker locations in your Kafka cluster that host your Kafka instance. Specify each broker location in the form `broker-hostname-or-ip:port`. For example, `"ec2-12-345-678-901.compute-1.amazonaws.com:2345"`. For more information and examples of specifying a list of broker locations, see [Using Apache Kafka as a target for Database Migration Service](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Kafka.html) in the *Database Migration Service User Guide*.
*/
public var broker: kotlin.String? = null
/**
* Shows detailed control information for table definition, column definition, and table and column changes in the Kafka message output. The default is `false`.
*/
public var includeControlDetails: kotlin.Boolean? = null
/**
* Includes NULL and empty columns for records migrated to the endpoint. The default is `false`.
*/
public var includeNullAndEmpty: kotlin.Boolean? = null
/**
* Shows the partition value within the Kafka message output unless the partition type is `schema-table-type`. The default is `false`.
*/
public var includePartitionValue: kotlin.Boolean? = null
/**
* Includes any data definition language (DDL) operations that change the table in the control data, such as `rename-table`, `drop-table`, `add-column`, `drop-column`, and `rename-column`. The default is `false`.
*/
public var includeTableAlterOperations: kotlin.Boolean? = null
/**
* Provides detailed transaction information from the source database. This information includes a commit timestamp, a log position, and values for `transaction_id`, previous `transaction_id`, and `transaction_record_id` (the record offset within a transaction). The default is `false`.
*/
public var includeTransactionDetails: kotlin.Boolean? = null
/**
* The output format for the records created on the endpoint. The message format is `JSON` (default) or `JSON_UNFORMATTED` (a single line with no tab).
*/
public var messageFormat: aws.sdk.kotlin.services.databasemigrationservice.model.MessageFormatValue? = null
/**
* The maximum size in bytes for records created on the endpoint. The default is 1,000,000.
*/
public var messageMaxBytes: kotlin.Int? = null
/**
* Set this optional parameter to `true` to avoid adding a '0x' prefix to raw data in hexadecimal format. For example, by default, DMS adds a '0x' prefix to the LOB column type in hexadecimal format when migrating from an Oracle source to a Kafka target. Use the `NoHexPrefix` endpoint setting to enable migration of RAW data type columns without adding the '0x' prefix.
*/
public var noHexPrefix: kotlin.Boolean? = null
/**
* Prefixes schema and table names to partition values, when the partition type is `primary-key-type`. Doing this increases data distribution among Kafka partitions. For example, suppose that a SysBench schema has thousands of tables and each table has only limited range for a primary key. In this case, the same primary key is sent from thousands of tables to the same partition, which causes throttling. The default is `false`.
*/
public var partitionIncludeSchemaTable: kotlin.Boolean? = null
/**
* For SASL/SSL authentication, DMS supports the `SCRAM-SHA-512` mechanism by default. DMS versions 3.5.0 and later also support the `PLAIN` mechanism. To use the `PLAIN` mechanism, set this parameter to `PLAIN`.
*/
public var saslMechanism: aws.sdk.kotlin.services.databasemigrationservice.model.KafkaSaslMechanism? = null
/**
* The secure password that you created when you first set up your MSK cluster, used to validate a client identity and make an encrypted connection between server and client using SASL/SSL authentication.
*/
public var saslPassword: kotlin.String? = null
/**
* The secure user name that you created when you first set up your MSK cluster, used to validate a client identity and make an encrypted connection between server and client using SASL/SSL authentication.
*/
public var saslUsername: kotlin.String? = null
/**
* Sets a secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include `ssl-encryption`, `ssl-authentication`, and `sasl-ssl`. `sasl-ssl` requires `SaslUsername` and `SaslPassword`.
*/
public var securityProtocol: aws.sdk.kotlin.services.databasemigrationservice.model.KafkaSecurityProtocol? = null
/**
* The Amazon Resource Name (ARN) for the private certificate authority (CA) cert that DMS uses to securely connect to your Kafka target endpoint.
*/
public var sslCaCertificateArn: kotlin.String? = null
/**
* The Amazon Resource Name (ARN) of the client certificate used to securely connect to a Kafka target endpoint.
*/
public var sslClientCertificateArn: kotlin.String? = null
/**
* The Amazon Resource Name (ARN) for the client private key used to securely connect to a Kafka target endpoint.
*/
public var sslClientKeyArn: kotlin.String? = null
/**
* The password for the client private key used to securely connect to a Kafka target endpoint.
*/
public var sslClientKeyPassword: kotlin.String? = null
/**
* Sets hostname verification for the certificate. This setting is supported in DMS version 3.5.1 and later.
*/
public var sslEndpointIdentificationAlgorithm: aws.sdk.kotlin.services.databasemigrationservice.model.KafkaSslEndpointIdentificationAlgorithm? = null
/**
* The topic to which you migrate the data. If you don't specify a topic, DMS specifies `"kafka-default-topic"` as the migration topic.
*/
public var topic: kotlin.String? = null
/**
* Specifies whether to use the large integer value with Kafka.
*/
public var useLargeIntegerValue: kotlin.Boolean? = null
@PublishedApi
internal constructor()
@PublishedApi
internal constructor(x: aws.sdk.kotlin.services.databasemigrationservice.model.KafkaSettings) : this() {
this.broker = x.broker
this.includeControlDetails = x.includeControlDetails
this.includeNullAndEmpty = x.includeNullAndEmpty
this.includePartitionValue = x.includePartitionValue
this.includeTableAlterOperations = x.includeTableAlterOperations
this.includeTransactionDetails = x.includeTransactionDetails
this.messageFormat = x.messageFormat
this.messageMaxBytes = x.messageMaxBytes
this.noHexPrefix = x.noHexPrefix
this.partitionIncludeSchemaTable = x.partitionIncludeSchemaTable
this.saslMechanism = x.saslMechanism
this.saslPassword = x.saslPassword
this.saslUsername = x.saslUsername
this.securityProtocol = x.securityProtocol
this.sslCaCertificateArn = x.sslCaCertificateArn
this.sslClientCertificateArn = x.sslClientCertificateArn
this.sslClientKeyArn = x.sslClientKeyArn
this.sslClientKeyPassword = x.sslClientKeyPassword
this.sslEndpointIdentificationAlgorithm = x.sslEndpointIdentificationAlgorithm
this.topic = x.topic
this.useLargeIntegerValue = x.useLargeIntegerValue
}
@PublishedApi
internal fun build(): aws.sdk.kotlin.services.databasemigrationservice.model.KafkaSettings = KafkaSettings(this)
internal fun correctErrors(): Builder {
return this
}
}
}
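
// Illustrative sketch (not part of the generated output): demonstrates copy(), which seeds a Builder
// with the current values and applies only the overrides set in the block. The broker value is a placeholder.
private fun kafkaSettingsCopyExample() {
    val base = KafkaSettings {
        broker = "b-1.example.kafka.us-east-1.amazonaws.com:9092" // placeholder broker endpoint
        messageMaxBytes = 1_000_000
    }

    // Only messageMaxBytes and includeTransactionDetails change; broker carries over from base.
    val tuned = base.copy {
        messageMaxBytes = 2_000_000
        includeTransactionDetails = true
    }

    // toString() redacts the sensitive saslPassword and sslClientKeyPassword values.
    println(tuned)
}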