com.pulumi.aws.dms.outputs.EndpointKafkaSettings
A Pulumi package for creating and managing Amazon Web Services (AWS) cloud resources.
// *** WARNING: this file was generated by pulumi-java-gen. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package com.pulumi.aws.dms.outputs;
import com.pulumi.core.annotations.CustomType;
import com.pulumi.exceptions.MissingRequiredPropertyException;
import java.lang.Boolean;
import java.lang.Integer;
import java.lang.String;
import java.util.Objects;
import java.util.Optional;
import javax.annotation.Nullable;
@CustomType
public final class EndpointKafkaSettings {
/**
* @return Kafka broker location. Specify in the form `broker-hostname-or-ip:port`.
*
*/
private String broker;
/**
* @return Shows detailed control information for table definition, column definition, and table and column changes in the Kafka message output. Default is `false`.
*
*/
private @Nullable Boolean includeControlDetails;
/**
* @return Include NULL and empty columns for records migrated to the endpoint. Default is `false`.
*
*/
private @Nullable Boolean includeNullAndEmpty;
/**
* @return Shows the partition value within the Kafka message output unless the partition type is `schema-table-type`. Default is `false`.
*
*/
private @Nullable Boolean includePartitionValue;
/**
* @return Includes any data definition language (DDL) operations that change the table in the control data, such as `rename-table`, `drop-table`, `add-column`, `drop-column`, and `rename-column`. Default is `false`.
*
*/
private @Nullable Boolean includeTableAlterOperations;
/**
* @return Provides detailed transaction information from the source database. This information includes a commit timestamp, a log position, and values for `transaction_id`, previous `transaction_id`, and `transaction_record_id` (the record offset within a transaction). Default is `false`.
*
*/
private @Nullable Boolean includeTransactionDetails;
/**
* @return Output format for the records created on the endpoint. Message format is `JSON` (default) or `JSON_UNFORMATTED` (a single line with no tab).
*
*/
private @Nullable String messageFormat;
/**
* @return Maximum size in bytes for records created on the endpoint. Default is `1,000,000`.
*
*/
private @Nullable Integer messageMaxBytes;
/**
* @return Set this optional parameter to `true` to avoid adding a `0x` prefix to raw data in hexadecimal format. For example, by default, AWS DMS adds a `0x` prefix to the LOB column type in hexadecimal format when migrating from an Oracle source to a Kafka target. Use the `no_hex_prefix` endpoint setting to enable migration of RAW data type columns without adding the `0x` prefix.
*
*/
private @Nullable Boolean noHexPrefix;
/**
* @return Prefixes schema and table names to partition values when the partition type is `primary-key-type`. Doing this increases data distribution among Kafka partitions. For example, suppose that a SysBench schema has thousands of tables and each table has only a limited range for a primary key. In this case, the same primary key is sent from thousands of tables to the same partition, which causes throttling. Default is `false`.
*
*/
private @Nullable Boolean partitionIncludeSchemaTable;
/**
* @return Secure password you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.
*
*/
private @Nullable String saslPassword;
/**
* @return Secure user name you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.
*
*/
private @Nullable String saslUsername;
/**
* @return Set secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include `ssl-encryption`, `ssl-authentication`, and `sasl-ssl`. `sasl-ssl` requires `sasl_username` and `sasl_password`.
*
*/
private @Nullable String securityProtocol;
/**
* @return ARN for the private certificate authority (CA) cert that AWS DMS uses to securely connect to your Kafka target endpoint.
*
*/
private @Nullable String sslCaCertificateArn;
/**
* @return ARN of the client certificate used to securely connect to a Kafka target endpoint.
*
*/
private @Nullable String sslClientCertificateArn;
/**
* @return ARN for the client private key used to securely connect to a Kafka target endpoint.
*
*/
private @Nullable String sslClientKeyArn;
/**
* @return Password for the client private key used to securely connect to a Kafka target endpoint.
*
*/
private @Nullable String sslClientKeyPassword;
/**
* @return Kafka topic for migration. Default is `kafka-default-topic`.
*
*/
private @Nullable String topic;
private EndpointKafkaSettings() {}
/**
* @return Kafka broker location. Specify in the form `broker-hostname-or-ip:port`.
*
*/
public String broker() {
return this.broker;
}
/**
* @return Shows detailed control information for table definition, column definition, and table and column changes in the Kafka message output. Default is `false`.
*
*/
public Optional<Boolean> includeControlDetails() {
return Optional.ofNullable(this.includeControlDetails);
}
/**
* @return Include NULL and empty columns for records migrated to the endpoint. Default is `false`.
*
*/
public Optional<Boolean> includeNullAndEmpty() {
return Optional.ofNullable(this.includeNullAndEmpty);
}
/**
* @return Shows the partition value within the Kafka message output unless the partition type is `schema-table-type`. Default is `false`.
*
*/
public Optional<Boolean> includePartitionValue() {
return Optional.ofNullable(this.includePartitionValue);
}
/**
* @return Includes any data definition language (DDL) operations that change the table in the control data, such as `rename-table`, `drop-table`, `add-column`, `drop-column`, and `rename-column`. Default is `false`.
*
*/
public Optional<Boolean> includeTableAlterOperations() {
return Optional.ofNullable(this.includeTableAlterOperations);
}
/**
* @return Provides detailed transaction information from the source database. This information includes a commit timestamp, a log position, and values for `transaction_id`, previous `transaction_id`, and `transaction_record_id` (the record offset within a transaction). Default is `false`.
*
*/
public Optional<Boolean> includeTransactionDetails() {
return Optional.ofNullable(this.includeTransactionDetails);
}
/**
* @return Output format for the records created on the endpoint. Message format is `JSON` (default) or `JSON_UNFORMATTED` (a single line with no tab).
*
*/
public Optional<String> messageFormat() {
return Optional.ofNullable(this.messageFormat);
}
/**
* @return Maximum size in bytes for records created on the endpoint. Default is `1,000,000`.
*
*/
public Optional<Integer> messageMaxBytes() {
return Optional.ofNullable(this.messageMaxBytes);
}
/**
* @return Set this optional parameter to `true` to avoid adding a `0x` prefix to raw data in hexadecimal format. For example, by default, AWS DMS adds a `0x` prefix to the LOB column type in hexadecimal format when migrating from an Oracle source to a Kafka target. Use the `no_hex_prefix` endpoint setting to enable migration of RAW data type columns without adding the `0x` prefix.
*
*/
public Optional<Boolean> noHexPrefix() {
return Optional.ofNullable(this.noHexPrefix);
}
/**
* @return Prefixes schema and table names to partition values when the partition type is `primary-key-type`. Doing this increases data distribution among Kafka partitions. For example, suppose that a SysBench schema has thousands of tables and each table has only a limited range for a primary key. In this case, the same primary key is sent from thousands of tables to the same partition, which causes throttling. Default is `false`.
*
*/
public Optional<Boolean> partitionIncludeSchemaTable() {
return Optional.ofNullable(this.partitionIncludeSchemaTable);
}
/**
* @return Secure password you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.
*
*/
public Optional<String> saslPassword() {
return Optional.ofNullable(this.saslPassword);
}
/**
* @return Secure user name you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.
*
*/
public Optional<String> saslUsername() {
return Optional.ofNullable(this.saslUsername);
}
/**
* @return Set secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include `ssl-encryption`, `ssl-authentication`, and `sasl-ssl`. `sasl-ssl` requires `sasl_username` and `sasl_password`.
*
*/
public Optional<String> securityProtocol() {
return Optional.ofNullable(this.securityProtocol);
}
/**
* @return ARN for the private certificate authority (CA) cert that AWS DMS uses to securely connect to your Kafka target endpoint.
*
*/
public Optional<String> sslCaCertificateArn() {
return Optional.ofNullable(this.sslCaCertificateArn);
}
/**
* @return ARN of the client certificate used to securely connect to a Kafka target endpoint.
*
*/
public Optional<String> sslClientCertificateArn() {
return Optional.ofNullable(this.sslClientCertificateArn);
}
/**
* @return ARN for the client private key used to securely connect to a Kafka target endpoint.
*
*/
public Optional<String> sslClientKeyArn() {
return Optional.ofNullable(this.sslClientKeyArn);
}
/**
* @return Password for the client private key used to securely connect to a Kafka target endpoint.
*
*/
public Optional<String> sslClientKeyPassword() {
return Optional.ofNullable(this.sslClientKeyPassword);
}
/**
* @return Kafka topic for migration. Default is `kafka-default-topic`.
*
*/
public Optional<String> topic() {
return Optional.ofNullable(this.topic);
}
public static Builder builder() {
return new Builder();
}
public static Builder builder(EndpointKafkaSettings defaults) {
return new Builder(defaults);
}
@CustomType.Builder
public static final class Builder {
private String broker;
private @Nullable Boolean includeControlDetails;
private @Nullable Boolean includeNullAndEmpty;
private @Nullable Boolean includePartitionValue;
private @Nullable Boolean includeTableAlterOperations;
private @Nullable Boolean includeTransactionDetails;
private @Nullable String messageFormat;
private @Nullable Integer messageMaxBytes;
private @Nullable Boolean noHexPrefix;
private @Nullable Boolean partitionIncludeSchemaTable;
private @Nullable String saslPassword;
private @Nullable String saslUsername;
private @Nullable String securityProtocol;
private @Nullable String sslCaCertificateArn;
private @Nullable String sslClientCertificateArn;
private @Nullable String sslClientKeyArn;
private @Nullable String sslClientKeyPassword;
private @Nullable String topic;
public Builder() {}
public Builder(EndpointKafkaSettings defaults) {
Objects.requireNonNull(defaults);
this.broker = defaults.broker;
this.includeControlDetails = defaults.includeControlDetails;
this.includeNullAndEmpty = defaults.includeNullAndEmpty;
this.includePartitionValue = defaults.includePartitionValue;
this.includeTableAlterOperations = defaults.includeTableAlterOperations;
this.includeTransactionDetails = defaults.includeTransactionDetails;
this.messageFormat = defaults.messageFormat;
this.messageMaxBytes = defaults.messageMaxBytes;
this.noHexPrefix = defaults.noHexPrefix;
this.partitionIncludeSchemaTable = defaults.partitionIncludeSchemaTable;
this.saslPassword = defaults.saslPassword;
this.saslUsername = defaults.saslUsername;
this.securityProtocol = defaults.securityProtocol;
this.sslCaCertificateArn = defaults.sslCaCertificateArn;
this.sslClientCertificateArn = defaults.sslClientCertificateArn;
this.sslClientKeyArn = defaults.sslClientKeyArn;
this.sslClientKeyPassword = defaults.sslClientKeyPassword;
this.topic = defaults.topic;
}
@CustomType.Setter
public Builder broker(String broker) {
if (broker == null) {
throw new MissingRequiredPropertyException("EndpointKafkaSettings", "broker");
}
this.broker = broker;
return this;
}
@CustomType.Setter
public Builder includeControlDetails(@Nullable Boolean includeControlDetails) {
this.includeControlDetails = includeControlDetails;
return this;
}
@CustomType.Setter
public Builder includeNullAndEmpty(@Nullable Boolean includeNullAndEmpty) {
this.includeNullAndEmpty = includeNullAndEmpty;
return this;
}
@CustomType.Setter
public Builder includePartitionValue(@Nullable Boolean includePartitionValue) {
this.includePartitionValue = includePartitionValue;
return this;
}
@CustomType.Setter
public Builder includeTableAlterOperations(@Nullable Boolean includeTableAlterOperations) {
this.includeTableAlterOperations = includeTableAlterOperations;
return this;
}
@CustomType.Setter
public Builder includeTransactionDetails(@Nullable Boolean includeTransactionDetails) {
this.includeTransactionDetails = includeTransactionDetails;
return this;
}
@CustomType.Setter
public Builder messageFormat(@Nullable String messageFormat) {
this.messageFormat = messageFormat;
return this;
}
@CustomType.Setter
public Builder messageMaxBytes(@Nullable Integer messageMaxBytes) {
this.messageMaxBytes = messageMaxBytes;
return this;
}
@CustomType.Setter
public Builder noHexPrefix(@Nullable Boolean noHexPrefix) {
this.noHexPrefix = noHexPrefix;
return this;
}
@CustomType.Setter
public Builder partitionIncludeSchemaTable(@Nullable Boolean partitionIncludeSchemaTable) {
this.partitionIncludeSchemaTable = partitionIncludeSchemaTable;
return this;
}
@CustomType.Setter
public Builder saslPassword(@Nullable String saslPassword) {
this.saslPassword = saslPassword;
return this;
}
@CustomType.Setter
public Builder saslUsername(@Nullable String saslUsername) {
this.saslUsername = saslUsername;
return this;
}
@CustomType.Setter
public Builder securityProtocol(@Nullable String securityProtocol) {
this.securityProtocol = securityProtocol;
return this;
}
@CustomType.Setter
public Builder sslCaCertificateArn(@Nullable String sslCaCertificateArn) {
this.sslCaCertificateArn = sslCaCertificateArn;
return this;
}
@CustomType.Setter
public Builder sslClientCertificateArn(@Nullable String sslClientCertificateArn) {
this.sslClientCertificateArn = sslClientCertificateArn;
return this;
}
@CustomType.Setter
public Builder sslClientKeyArn(@Nullable String sslClientKeyArn) {
this.sslClientKeyArn = sslClientKeyArn;
return this;
}
@CustomType.Setter
public Builder sslClientKeyPassword(@Nullable String sslClientKeyPassword) {
this.sslClientKeyPassword = sslClientKeyPassword;
return this;
}
@CustomType.Setter
public Builder topic(@Nullable String topic) {
this.topic = topic;
return this;
}
public EndpointKafkaSettings build() {
final var _resultValue = new EndpointKafkaSettings();
_resultValue.broker = broker;
_resultValue.includeControlDetails = includeControlDetails;
_resultValue.includeNullAndEmpty = includeNullAndEmpty;
_resultValue.includePartitionValue = includePartitionValue;
_resultValue.includeTableAlterOperations = includeTableAlterOperations;
_resultValue.includeTransactionDetails = includeTransactionDetails;
_resultValue.messageFormat = messageFormat;
_resultValue.messageMaxBytes = messageMaxBytes;
_resultValue.noHexPrefix = noHexPrefix;
_resultValue.partitionIncludeSchemaTable = partitionIncludeSchemaTable;
_resultValue.saslPassword = saslPassword;
_resultValue.saslUsername = saslUsername;
_resultValue.securityProtocol = securityProtocol;
_resultValue.sslCaCertificateArn = sslCaCertificateArn;
_resultValue.sslClientCertificateArn = sslClientCertificateArn;
_resultValue.sslClientKeyArn = sslClientKeyArn;
_resultValue.sslClientKeyPassword = sslClientKeyPassword;
_resultValue.topic = topic;
return _resultValue;
}
}
}
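A minimal usage sketch follows; it exercises only the builder and accessors defined in the generated class above. The broker address, credentials, and topic name are hypothetical placeholders, not values taken from the file.

// Hypothetical values for illustration; only the builder and accessors above are used.
EndpointKafkaSettings settings = EndpointKafkaSettings.builder()
    .broker("b-1.example-cluster.kafka.us-east-1.amazonaws.com:9096") // required; the setter throws MissingRequiredPropertyException if null
    .securityProtocol("sasl-ssl")        // sasl-ssl requires sasl_username and sasl_password
    .saslUsername("dms-user")
    .saslPassword("example-password")
    .messageFormat("JSON_UNFORMATTED")
    .topic("dms-migration-topic")
    .build();

// The required broker is returned directly; every optional setting is wrapped in Optional.
String broker = settings.broker();
String topic = settings.topic().orElse("kafka-default-topic");
boolean includeControlDetails = settings.includeControlDetails().orElse(false);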