com.pulumi.aws.redshift.kotlin.inputs.ClusterLoggingArgs.kt

Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.
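
To pull this package into a Gradle build, a minimal Kotlin DSL sketch follows; the group, artifact, and version below are illustrative assumptions to verify against the repository listing:

    dependencies {
        // hypothetical coordinates and version; confirm against the Maven repository entry
        implementation("com.pulumi:aws-kotlin:6.66.3.0")
    }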

@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.aws.redshift.kotlin.inputs

import com.pulumi.aws.redshift.inputs.ClusterLoggingArgs.builder
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiNullFieldException
import com.pulumi.kotlin.PulumiTagMarker
import kotlin.Boolean
import kotlin.String
import kotlin.Suppress
import kotlin.collections.List
import kotlin.jvm.JvmName

/**
 *
 * @property bucketName The name of an existing S3 bucket where the log files are to be stored. The bucket must be in the same region as the cluster, and the cluster must have read bucket and put object permissions on it.
 * For more information on the permissions required for the bucket, please read the AWS [documentation](http://docs.aws.amazon.com/redshift/latest/mgmt/db-auditing.html#db-auditing-enable-logging)
 * @property enable Enables logging of information, such as queries and connection attempts, for the specified Amazon Redshift cluster.
 * @property logDestinationType The log destination type. An enum with possible values of `s3` and `cloudwatch`.
 * @property logExports The collection of exported log types. Log types include the connection log, user log and user activity log. Required when `log_destination_type` is `cloudwatch`. Valid log types are `connectionlog`, `userlog`, and `useractivitylog`.
 * @property s3KeyPrefix The prefix applied to the log file names.
 */
public data class ClusterLoggingArgs(
    public val bucketName: Output<String>? = null,
    public val enable: Output<Boolean>,
    public val logDestinationType: Output<String>? = null,
    public val logExports: Output<List<String>>? = null,
    public val s3KeyPrefix: Output<String>? = null,
) : ConvertibleToJava<com.pulumi.aws.redshift.inputs.ClusterLoggingArgs> {
    override fun toJava(): com.pulumi.aws.redshift.inputs.ClusterLoggingArgs =
        com.pulumi.aws.redshift.inputs.ClusterLoggingArgs.builder()
            .bucketName(bucketName?.applyValue({ args0 -> args0 }))
            .enable(enable.applyValue({ args0 -> args0 }))
            .logDestinationType(logDestinationType?.applyValue({ args0 -> args0 }))
            .logExports(logExports?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .s3KeyPrefix(s3KeyPrefix?.applyValue({ args0 -> args0 })).build()
}
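
/*
 * Usage sketch (not part of the generated source): the args above can be constructed
 * directly through the data class constructor. The bucket name and key prefix are
 * hypothetical placeholders.
 *
 *     val logging = ClusterLoggingArgs(
 *         enable = Output.of(true),
 *         bucketName = Output.of("my-redshift-audit-logs"),
 *         s3KeyPrefix = Output.of("redshift/"),
 *     )
 */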

/**
 * Builder for [ClusterLoggingArgs].
 */
@PulumiTagMarker
public class ClusterLoggingArgsBuilder internal constructor() {
    private var bucketName: Output<String>? = null

    private var enable: Output<Boolean>? = null

    private var logDestinationType: Output<String>? = null

    private var logExports: Output<List<String>>? = null

    private var s3KeyPrefix: Output<String>? = null

    /**
     * @param value The name of an existing S3 bucket where the log files are to be stored. The bucket must be in the same region as the cluster, and the cluster must have read bucket and put object permissions on it.
     * For more information on the permissions required for the bucket, please read the AWS [documentation](http://docs.aws.amazon.com/redshift/latest/mgmt/db-auditing.html#db-auditing-enable-logging)
     */
    @JvmName("eexdqahofmcccpxu")
    public suspend fun bucketName(`value`: Output<String>) {
        this.bucketName = value
    }

    /**
     * @param value Enables logging of information, such as queries and connection attempts, for the specified Amazon Redshift cluster.
     */
    @JvmName("mbhnlgnslsikjxyh")
    public suspend fun enable(`value`: Output<Boolean>) {
        this.enable = value
    }

    /**
     * @param value The log destination type. An enum with possible values of `s3` and `cloudwatch`.
     */
    @JvmName("dqjjpmqqjvikpsaq")
    public suspend fun logDestinationType(`value`: Output<String>) {
        this.logDestinationType = value
    }

    /**
     * @param value The collection of exported log types. Log types include the connection log, user log and user activity log. Required when `log_destination_type` is `cloudwatch`. Valid log types are `connectionlog`, `userlog`, and `useractivitylog`.
     */
    @JvmName("lorwbemtexyllvvr")
    public suspend fun logExports(`value`: Output<List<String>>) {
        this.logExports = value
    }

    @JvmName("aopagwqetcacqacx")
    public suspend fun logExports(vararg values: Output) {
        this.logExports = Output.all(values.asList())
    }

    /**
     * @param values The collection of exported log types. Log types include the connection log, user log and user activity log. Required when `log_destination_type` is `cloudwatch`. Valid log types are `connectionlog`, `userlog`, and `useractivitylog`.
     */
    @JvmName("dbfvcpyttqehqajs")
    public suspend fun logExports(values: List<Output<String>>) {
        this.logExports = Output.all(values)
    }

    /**
     * @param value The prefix applied to the log file names.
     */
    @JvmName("jtqhbagejhkbvynv")
    public suspend fun s3KeyPrefix(`value`: Output<String>) {
        this.s3KeyPrefix = value
    }

    /**
     * @param value The name of an existing S3 bucket where the log files are to be stored. The bucket must be in the same region as the cluster, and the cluster must have read bucket and put object permissions on it.
     * For more information on the permissions required for the bucket, please read the AWS [documentation](http://docs.aws.amazon.com/redshift/latest/mgmt/db-auditing.html#db-auditing-enable-logging)
     */
    @JvmName("jllfnjuhfimtmwqx")
    public suspend fun bucketName(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.bucketName = mapped
    }

    /**
     * @param value Enables logging of information, such as queries and connection attempts, for the specified Amazon Redshift cluster.
     */
    @JvmName("xjjmmwtvblngbwat")
    public suspend fun enable(`value`: Boolean) {
        val toBeMapped = value
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.enable = mapped
    }

    /**
     * @param value The log destination type. An enum with possible values of `s3` and `cloudwatch`.
     */
    @JvmName("idextifwrwtacpvm")
    public suspend fun logDestinationType(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.logDestinationType = mapped
    }

    /**
     * @param value The collection of exported log types. Log types include the connection log, user log and user activity log. Required when `log_destination_type` is `cloudwatch`. Valid log types are `connectionlog`, `userlog`, and `useractivitylog`.
     */
    @JvmName("jlrkyomfphdsljyl")
    public suspend fun logExports(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.logExports = mapped
    }

    /**
     * @param values The collection of exported log types. Log types include the connection log, user log and user activity log. Required when `log_destination_type` is `cloudwatch`. Valid log types are `connectionlog`, `userlog`, and `useractivitylog`.
     */
    @JvmName("vwvctdujjankemvv")
    public suspend fun logExports(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.logExports = mapped
    }

    /**
     * @param value The prefix applied to the log file names.
     */
    @JvmName("rqiyjpeqlipiyhwr")
    public suspend fun s3KeyPrefix(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.s3KeyPrefix = mapped
    }

    internal fun build(): ClusterLoggingArgs = ClusterLoggingArgs(
        bucketName = bucketName,
        enable = enable ?: throw PulumiNullFieldException("enable"),
        logDestinationType = logDestinationType,
        logExports = logExports,
        s3KeyPrefix = s3KeyPrefix,
    )
}
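
/*
 * Usage sketch (assumed surrounding DSL, not defined in this file): in the Pulumi AWS
 * Kotlin SDK these builders are typically reached through a type-safe block exposed by
 * the enclosing resource's args builder, e.g. a `logging { ... }` function on the
 * cluster args. The resource/args scaffolding and the bucket name below are illustrative
 * assumptions; only the property setters come from the builder defined above.
 *
 *     cluster("example") {
 *         args {
 *             // ...other cluster settings...
 *             logging {
 *                 enable(true)
 *                 logDestinationType("s3")
 *                 bucketName("my-redshift-audit-logs")
 *                 s3KeyPrefix("redshift/")
 *             }
 *         }
 *     }
 */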



