
// com.pulumi.digitalocean.kotlin.DatabaseKafkaTopic.kt

@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.digitalocean.kotlin

import com.pulumi.core.Output
import com.pulumi.digitalocean.kotlin.outputs.DatabaseKafkaTopicConfig
import com.pulumi.digitalocean.kotlin.outputs.DatabaseKafkaTopicConfig.Companion.toKotlin
import com.pulumi.kotlin.KotlinCustomResource
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.ResourceMapper
import com.pulumi.kotlin.options.CustomResourceOptions
import com.pulumi.kotlin.options.CustomResourceOptionsBuilder
import com.pulumi.resources.Resource
import kotlin.Boolean
import kotlin.Int
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List

/**
 * Builder for [DatabaseKafkaTopic].
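 *
 * A minimal usage sketch, normally reached through the [databaseKafkaTopic] function at the
 * end of this file and run inside a Pulumi program; the args setter names assume the generated
 * [DatabaseKafkaTopicArgsBuilder], and the cluster ID is a placeholder for an existing Kafka cluster:
 * ```kotlin
 * val topic = databaseKafkaTopic("topic-01") {
 *     args {
 *         clusterId("<kafka-cluster-id>") // placeholder for an existing cluster ID
 *         partitionCount(3)
 *         replicationFactor(2)
 *     }
 *     opts {
 *         // resource options (provider, dependsOn, protect, ...) go here
 *     }
 * }
 * ```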
 */
@PulumiTagMarker
public class DatabaseKafkaTopicResourceBuilder internal constructor() {
    public var name: String? = null

    public var args: DatabaseKafkaTopicArgs = DatabaseKafkaTopicArgs()

    public var opts: CustomResourceOptions = CustomResourceOptions()

    /**
     * @param name The _unique_ name of the resulting resource.
     */
    public fun name(`value`: String) {
        this.name = value
    }

    /**
     * @param block The arguments to use to populate this resource's properties.
     */
    public suspend fun args(block: suspend DatabaseKafkaTopicArgsBuilder.() -> Unit) {
        val builder = DatabaseKafkaTopicArgsBuilder()
        block(builder)
        this.args = builder.build()
    }

    /**
     * @param block A bag of options that control this resource's behavior.
     */
    public suspend fun opts(block: suspend CustomResourceOptionsBuilder.() -> Unit) {
        this.opts = com.pulumi.kotlin.options.CustomResourceOptions.opts(block)
    }

    internal fun build(): DatabaseKafkaTopic {
        val builtJavaResource = com.pulumi.digitalocean.DatabaseKafkaTopic(
            this.name,
            this.args.toJava(),
            this.opts.toJava(),
        )
        return DatabaseKafkaTopic(builtJavaResource)
    }
}

/**
 * Provides a DigitalOcean Kafka topic for Kafka clusters.
 * ## Example Usage
 * ### Create a new Kafka topic
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as digitalocean from "@pulumi/digitalocean";
 * const kafka_example = new digitalocean.DatabaseCluster("kafka-example", {
 *     name: "example-kafka-cluster",
 *     engine: "kafka",
 *     version: "3.5",
 *     size: "db-s-2vcpu-2gb",
 *     region: digitalocean.Region.NYC1,
 *     nodeCount: 3,
 *     tags: ["production"],
 * });
 * const topic_01 = new digitalocean.DatabaseKafkaTopic("topic-01", {
 *     clusterId: kafka_example.id,
 *     name: "topic-01",
 *     partitionCount: 3,
 *     replicationFactor: 2,
 *     configs: [{
 *         cleanupPolicy: "compact",
 *         compressionType: "uncompressed",
 *         deleteRetentionMs: "14000",
 *         fileDeleteDelayMs: "170000",
 *         flushMessages: "92233",
 *         flushMs: "92233720368",
 *         indexIntervalBytes: "40962",
 *         maxCompactionLagMs: "9223372036854775807",
 *         maxMessageBytes: "1048588",
 *         messageDownConversionEnable: true,
 *         messageFormatVersion: "3.0-IV1",
 *         messageTimestampDifferenceMaxMs: "9223372036854775807",
 *         messageTimestampType: "log_append_time",
 *         minCleanableDirtyRatio: 0.5,
 *         minCompactionLagMs: "20000",
 *         minInsyncReplicas: 2,
 *         preallocate: false,
 *         retentionBytes: "-1",
 *         retentionMs: "-1",
 *         segmentBytes: "209715200",
 *         segmentIndexBytes: "10485760",
 *         segmentJitterMs: "0",
 *         segmentMs: "604800000",
 *     }],
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_digitalocean as digitalocean
 * kafka_example = digitalocean.DatabaseCluster("kafka-example",
 *     name="example-kafka-cluster",
 *     engine="kafka",
 *     version="3.5",
 *     size="db-s-2vcpu-2gb",
 *     region=digitalocean.Region.NYC1,
 *     node_count=3,
 *     tags=["production"])
 * topic_01 = digitalocean.DatabaseKafkaTopic("topic-01",
 *     cluster_id=kafka_example.id,
 *     name="topic-01",
 *     partition_count=3,
 *     replication_factor=2,
 *     configs=[{
 *         "cleanup_policy": "compact",
 *         "compression_type": "uncompressed",
 *         "delete_retention_ms": "14000",
 *         "file_delete_delay_ms": "170000",
 *         "flush_messages": "92233",
 *         "flush_ms": "92233720368",
 *         "index_interval_bytes": "40962",
 *         "max_compaction_lag_ms": "9223372036854775807",
 *         "max_message_bytes": "1048588",
 *         "message_down_conversion_enable": True,
 *         "message_format_version": "3.0-IV1",
 *         "message_timestamp_difference_max_ms": "9223372036854775807",
 *         "message_timestamp_type": "log_append_time",
 *         "min_cleanable_dirty_ratio": 0.5,
 *         "min_compaction_lag_ms": "20000",
 *         "min_insync_replicas": 2,
 *         "preallocate": False,
 *         "retention_bytes": "-1",
 *         "retention_ms": "-1",
 *         "segment_bytes": "209715200",
 *         "segment_index_bytes": "10485760",
 *         "segment_jitter_ms": "0",
 *         "segment_ms": "604800000",
 *     }])
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using DigitalOcean = Pulumi.DigitalOcean;
 * return await Deployment.RunAsync(() =>
 * {
 *     var kafka_example = new DigitalOcean.DatabaseCluster("kafka-example", new()
 *     {
 *         Name = "example-kafka-cluster",
 *         Engine = "kafka",
 *         Version = "3.5",
 *         Size = "db-s-2vcpu-2gb",
 *         Region = DigitalOcean.Region.NYC1,
 *         NodeCount = 3,
 *         Tags = new[]
 *         {
 *             "production",
 *         },
 *     });
 *     var topic_01 = new DigitalOcean.DatabaseKafkaTopic("topic-01", new()
 *     {
 *         ClusterId = kafka_example.Id,
 *         Name = "topic-01",
 *         PartitionCount = 3,
 *         ReplicationFactor = 2,
 *         Configs = new[]
 *         {
 *             new DigitalOcean.Inputs.DatabaseKafkaTopicConfigArgs
 *             {
 *                 CleanupPolicy = "compact",
 *                 CompressionType = "uncompressed",
 *                 DeleteRetentionMs = "14000",
 *                 FileDeleteDelayMs = "170000",
 *                 FlushMessages = "92233",
 *                 FlushMs = "92233720368",
 *                 IndexIntervalBytes = "40962",
 *                 MaxCompactionLagMs = "9223372036854775807",
 *                 MaxMessageBytes = "1048588",
 *                 MessageDownConversionEnable = true,
 *                 MessageFormatVersion = "3.0-IV1",
 *                 MessageTimestampDifferenceMaxMs = "9223372036854775807",
 *                 MessageTimestampType = "log_append_time",
 *                 MinCleanableDirtyRatio = 0.5,
 *                 MinCompactionLagMs = "20000",
 *                 MinInsyncReplicas = 2,
 *                 Preallocate = false,
 *                 RetentionBytes = "-1",
 *                 RetentionMs = "-1",
 *                 SegmentBytes = "209715200",
 *                 SegmentIndexBytes = "10485760",
 *                 SegmentJitterMs = "0",
 *                 SegmentMs = "604800000",
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-digitalocean/sdk/v4/go/digitalocean"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		kafka_example, err := digitalocean.NewDatabaseCluster(ctx, "kafka-example", &digitalocean.DatabaseClusterArgs{
 * 			Name:      pulumi.String("example-kafka-cluster"),
 * 			Engine:    pulumi.String("kafka"),
 * 			Version:   pulumi.String("3.5"),
 * 			Size:      pulumi.String("db-s-2vcpu-2gb"),
 * 			Region:    pulumi.String(digitalocean.RegionNYC1),
 * 			NodeCount: pulumi.Int(3),
 * 			Tags: pulumi.StringArray{
 * 				pulumi.String("production"),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = digitalocean.NewDatabaseKafkaTopic(ctx, "topic-01", &digitalocean.DatabaseKafkaTopicArgs{
 * 			ClusterId:         kafka_example.ID(),
 * 			Name:              pulumi.String("topic-01"),
 * 			PartitionCount:    pulumi.Int(3),
 * 			ReplicationFactor: pulumi.Int(2),
 * 			Configs: digitalocean.DatabaseKafkaTopicConfigArray{
 * 				&digitalocean.DatabaseKafkaTopicConfigArgs{
 * 					CleanupPolicy:                   pulumi.String("compact"),
 * 					CompressionType:                 pulumi.String("uncompressed"),
 * 					DeleteRetentionMs:               pulumi.String("14000"),
 * 					FileDeleteDelayMs:               pulumi.String("170000"),
 * 					FlushMessages:                   pulumi.String("92233"),
 * 					FlushMs:                         pulumi.String("92233720368"),
 * 					IndexIntervalBytes:              pulumi.String("40962"),
 * 					MaxCompactionLagMs:              pulumi.String("9223372036854775807"),
 * 					MaxMessageBytes:                 pulumi.String("1048588"),
 * 					MessageDownConversionEnable:     pulumi.Bool(true),
 * 					MessageFormatVersion:            pulumi.String("3.0-IV1"),
 * 					MessageTimestampDifferenceMaxMs: pulumi.String("9223372036854775807"),
 * 					MessageTimestampType:            pulumi.String("log_append_time"),
 * 					MinCleanableDirtyRatio:          pulumi.Float64(0.5),
 * 					MinCompactionLagMs:              pulumi.String("20000"),
 * 					MinInsyncReplicas:               pulumi.Int(2),
 * 					Preallocate:                     pulumi.Bool(false),
 * 					RetentionBytes:                  pulumi.String("-1"),
 * 					RetentionMs:                     pulumi.String("-1"),
 * 					SegmentBytes:                    pulumi.String("209715200"),
 * 					SegmentIndexBytes:               pulumi.String("10485760"),
 * 					SegmentJitterMs:                 pulumi.String("0"),
 * 					SegmentMs:                       pulumi.String("604800000"),
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```yaml
 * resources:
 *   topic-01:
 *     type: digitalocean:DatabaseKafkaTopic
 *     properties:
 *       clusterId: ${["kafka-example"].id}
 *       name: topic-01
 *       partitionCount: 3
 *       replicationFactor: 2
 *       configs:
 *         - cleanupPolicy: compact
 *           compressionType: uncompressed
 *           deleteRetentionMs: 14000
 *           fileDeleteDelayMs: 170000
 *           flushMessages: 92233
 *           flushMs: 92233720368
 *           indexIntervalBytes: 40962
 *           maxCompactionLagMs: 9223372036854775807
 *           maxMessageBytes: 1048588
 *           messageDownConversionEnable: true
 *           messageFormatVersion: 3.0-IV1
 *           messageTimestampDifferenceMaxMs: 9223372036854775807
 *           messageTimestampType: log_append_time
 *           minCleanableDirtyRatio: 0.5
 *           minCompactionLagMs: 20000
 *           minInsyncReplicas: 2
 *           preallocate: false
 *           retentionBytes: -1
 *           retentionMs: -1
 *           segmentBytes: 209715200
 *           segmentIndexBytes: 10485760
 *           segmentJitterMs: 0
 *           segmentMs: 604800000
 *   kafka-example:
 *     type: digitalocean:DatabaseCluster
 *     properties:
 *       name: example-kafka-cluster
 *       engine: kafka
 *       version: '3.5'
 *       size: db-s-2vcpu-2gb
 *       region: nyc1
 *       nodeCount: 3
 *       tags:
 *         - production
 * ```
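 *
 * A Kotlin sketch of the same topic using this SDK's `databaseKafkaTopic` builder; the
 * `Pulumi.run` entry point is assumed from the Pulumi Kotlin SDK, the cluster ID is a
 * placeholder for an existing Kafka cluster, and only a subset of the settings above is shown.
 * ```kotlin
 * import com.pulumi.digitalocean.kotlin.databaseKafkaTopic
 * import com.pulumi.kotlin.Pulumi
 * fun main() {
 *     Pulumi.run {
 *         databaseKafkaTopic("topic-01") {
 *             args {
 *                 // Placeholder: ID of an existing Kafka database cluster.
 *                 clusterId("245bcfd0-7f31-4ce6-a2bc-475a116cca97")
 *                 name("topic-01")
 *                 partitionCount(3)
 *                 replicationFactor(2)
 *             }
 *         }
 *     }
 * }
 * ```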
 * 
 * ## Import
 * Topics can be imported using the `id` of the source cluster and the `name` of the topic joined with a comma. For example:
 * ```sh
 * $ pulumi import digitalocean:index/databaseKafkaTopic:DatabaseKafkaTopic topic-01 245bcfd0-7f31-4ce6-a2bc-475a116cca97,topic-01
 * ```
 */
public class DatabaseKafkaTopic internal constructor(
    override val javaResource: com.pulumi.digitalocean.DatabaseKafkaTopic,
) : KotlinCustomResource(javaResource, DatabaseKafkaTopicMapper) {
    /**
     * The ID of the source database cluster. Note: This must be a Kafka cluster.
     */
    public val clusterId: Output<String>
        get() = javaResource.clusterId().applyValue({ args0 -> args0 })

    /**
     * A set of advanced configuration parameters. Defaults will be set for any of the parameters that are not included.
     * The `config` block is documented below.
     */
    public val configs: Output<List<DatabaseKafkaTopicConfig>>
        get() = javaResource.configs().applyValue({ args0 ->
            args0.map({ args0 ->
                args0.let({ args0 ->
                    toKotlin(args0)
                })
            })
        })

    /**
     * The name for the topic.
     */
    public val name: Output<String>
        get() = javaResource.name().applyValue({ args0 -> args0 })

    /**
     * The number of partitions for the topic. Defaults to the minimum of 3; the maximum is 2048.
     */
    public val partitionCount: Output<Int>?
        get() = javaResource.partitionCount().applyValue({ args0 ->
            args0.map({ args0 ->
                args0
            }).orElse(null)
        })

    /**
     * The number of nodes the topic is replicated across. Defaults to the minimum of 2; the maximum is the number of nodes in the cluster.
     */
    public val replicationFactor: Output<Int>?
        get() = javaResource.replicationFactor().applyValue({ args0 ->
            args0.map({ args0 ->
                args0
            }).orElse(null)
        })

    /**
     * The current status of the topic. Possible values are 'active', 'configuring', and 'deleting'.
     */
    public val state: Output<String>
        get() = javaResource.state().applyValue({ args0 -> args0 })
}

public object DatabaseKafkaTopicMapper : ResourceMapper<DatabaseKafkaTopic> {
    override fun supportsMappingOfType(javaResource: Resource): Boolean =
        com.pulumi.digitalocean.DatabaseKafkaTopic::class == javaResource::class

    override fun map(javaResource: Resource): DatabaseKafkaTopic = DatabaseKafkaTopic(
        javaResource as
            com.pulumi.digitalocean.DatabaseKafkaTopic,
    )
}

/**
 * @see [DatabaseKafkaTopic].
 * @param name The _unique_ name of the resulting resource.
 * @param block Builder for [DatabaseKafkaTopic].
 */
public suspend fun databaseKafkaTopic(
    name: String,
    block: suspend DatabaseKafkaTopicResourceBuilder.() -> Unit,
): DatabaseKafkaTopic {
    val builder = DatabaseKafkaTopicResourceBuilder()
    builder.name(name)
    block(builder)
    return builder.build()
}

/**
 * @see [DatabaseKafkaTopic].
 * @param name The _unique_ name of the resulting resource.
 */
public fun databaseKafkaTopic(name: String): DatabaseKafkaTopic {
    val builder = DatabaseKafkaTopicResourceBuilder()
    builder.name(name)
    return builder.build()
}



