@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.aws.redshift.kotlin

import com.pulumi.aws.redshift.LoggingArgs.builder
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import kotlin.String
import kotlin.Suppress
import kotlin.collections.List
import kotlin.jvm.JvmName

/**
 * Resource for managing an AWS Redshift Logging configuration.
 * ## Example Usage
 * ### Basic Usage
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as aws from "@pulumi/aws";
 * const example = new aws.redshift.Logging("example", {
 *     clusterIdentifier: exampleAwsRedshiftCluster.id,
 *     logDestinationType: "cloudwatch",
 *     logExports: [
 *         "connectionlog",
 *         "userlog",
 *     ],
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_aws as aws
 * example = aws.redshift.Logging("example",
 *     cluster_identifier=example_aws_redshift_cluster["id"],
 *     log_destination_type="cloudwatch",
 *     log_exports=[
 *         "connectionlog",
 *         "userlog",
 *     ])
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Aws = Pulumi.Aws;
 * return await Deployment.RunAsync(() =>
 * {
 *     var example = new Aws.RedShift.Logging("example", new()
 *     {
 *         ClusterIdentifier = exampleAwsRedshiftCluster.Id,
 *         LogDestinationType = "cloudwatch",
 *         LogExports = new[]
 *         {
 *             "connectionlog",
 *             "userlog",
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/redshift"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := redshift.NewLogging(ctx, "example", &redshift.LoggingArgs{
 * 			ClusterIdentifier:  pulumi.Any(exampleAwsRedshiftCluster.Id),
 * 			LogDestinationType: pulumi.String("cloudwatch"),
 * 			LogExports: pulumi.StringArray{
 * 				pulumi.String("connectionlog"),
 * 				pulumi.String("userlog"),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.aws.redshift.Logging;
 * import com.pulumi.aws.redshift.LoggingArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var example = new Logging("example", LoggingArgs.builder()
 *             .clusterIdentifier(exampleAwsRedshiftCluster.id())
 *             .logDestinationType("cloudwatch")
 *             .logExports(
 *                 "connectionlog",
 *                 "userlog")
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   example:
 *     type: aws:redshift:Logging
 *     properties:
 *       clusterIdentifier: ${exampleAwsRedshiftCluster.id}
 *       logDestinationType: cloudwatch
 *       logExports:
 *         - connectionlog
 *         - userlog
 * ```
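 *
 * A Kotlin sketch of the same configuration (not part of the upstream provider examples;
 * it assumes the generated Kotlin DSL exposes a lowercase `logging` resource builder
 * function and the `Pulumi.run` entry point, and `exampleAwsRedshiftCluster` is an
 * existing cluster resource, as in the examples above):
 * ```kotlin
 * import com.pulumi.aws.redshift.kotlin.logging
 * import com.pulumi.kotlin.Pulumi
 * fun main() {
 *     Pulumi.run { ctx ->
 *         // Hypothetical sketch: `logging` and `Pulumi.run` are assumed entry points of
 *         // the generated Kotlin DSL; the setters mirror LoggingArgsBuilder below.
 *         val example = logging("example") {
 *             args {
 *                 clusterIdentifier(exampleAwsRedshiftCluster.id)
 *                 logDestinationType("cloudwatch")
 *                 logExports("connectionlog", "userlog")
 *             }
 *         }
 *     }
 * }
 * ```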
 * 
 * ### S3 Destination Type
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as aws from "@pulumi/aws";
 * const example = new aws.redshift.Logging("example", {
 *     clusterIdentifier: exampleAwsRedshiftCluster.id,
 *     logDestinationType: "s3",
 *     bucketName: exampleAwsS3Bucket.id,
 *     s3KeyPrefix: "example-prefix/",
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_aws as aws
 * example = aws.redshift.Logging("example",
 *     cluster_identifier=example_aws_redshift_cluster["id"],
 *     log_destination_type="s3",
 *     bucket_name=example_aws_s3_bucket["id"],
 *     s3_key_prefix="example-prefix/")
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Aws = Pulumi.Aws;
 * return await Deployment.RunAsync(() =>
 * {
 *     var example = new Aws.RedShift.Logging("example", new()
 *     {
 *         ClusterIdentifier = exampleAwsRedshiftCluster.Id,
 *         LogDestinationType = "s3",
 *         BucketName = exampleAwsS3Bucket.Id,
 *         S3KeyPrefix = "example-prefix/",
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/redshift"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := redshift.NewLogging(ctx, "example", &redshift.LoggingArgs{
 * 			ClusterIdentifier:  pulumi.Any(exampleAwsRedshiftCluster.Id),
 * 			LogDestinationType: pulumi.String("s3"),
 * 			BucketName:         pulumi.Any(exampleAwsS3Bucket.Id),
 * 			S3KeyPrefix:        pulumi.String("example-prefix/"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.aws.redshift.Logging;
 * import com.pulumi.aws.redshift.LoggingArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var example = new Logging("example", LoggingArgs.builder()
 *             .clusterIdentifier(exampleAwsRedshiftCluster.id())
 *             .logDestinationType("s3")
 *             .bucketName(exampleAwsS3Bucket.id())
 *             .s3KeyPrefix("example-prefix/")
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   example:
 *     type: aws:redshift:Logging
 *     properties:
 *       clusterIdentifier: ${exampleAwsRedshiftCluster.id}
 *       logDestinationType: s3
 *       bucketName: ${exampleAwsS3Bucket.id}
 *       s3KeyPrefix: example-prefix/
 * ```
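 *
 * A Kotlin sketch of the S3 variant, under the same assumptions as the Kotlin sketch
 * above (generated `logging` DSL function, placeholder resources):
 * ```kotlin
 * import com.pulumi.aws.redshift.kotlin.logging
 * import com.pulumi.kotlin.Pulumi
 * fun main() {
 *     Pulumi.run { ctx ->
 *         // Hypothetical sketch; the setter names come from LoggingArgsBuilder below.
 *         val example = logging("example") {
 *             args {
 *                 clusterIdentifier(exampleAwsRedshiftCluster.id)
 *                 logDestinationType("s3")
 *                 bucketName(exampleAwsS3Bucket.id)
 *                 s3KeyPrefix("example-prefix/")
 *             }
 *         }
 *     }
 * }
 * ```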
 * 
 * ## Import
 * Using `pulumi import`, import Redshift Logging using the `id`. For example:
 * ```sh
 * $ pulumi import aws:redshift/logging:Logging example cluster-id-12345678
 * ```
 * @property bucketName Name of an existing S3 bucket where the log files are to be stored. Required when `log_destination_type` is `s3`. The bucket must be in the same region as the cluster, and the cluster must have permission to read the bucket and put objects into it. For more information on the required bucket permissions, see the AWS [documentation](http://docs.aws.amazon.com/redshift/latest/mgmt/db-auditing.html#db-auditing-enable-logging).
 * @property clusterIdentifier Identifier of the source cluster. All other arguments are optional.
 * @property logDestinationType Log destination type. Valid values are `s3` and `cloudwatch`.
 * @property logExports Collection of exported log types. Required when `log_destination_type` is `cloudwatch`. Valid values are `connectionlog`, `useractivitylog`, and `userlog`.
 * @property s3KeyPrefix Prefix applied to the log file names.
 */
public data class LoggingArgs(
    public val bucketName: Output<String>? = null,
    public val clusterIdentifier: Output<String>? = null,
    public val logDestinationType: Output<String>? = null,
    public val logExports: Output<List<String>>? = null,
    public val s3KeyPrefix: Output<String>? = null,
) : ConvertibleToJava<com.pulumi.aws.redshift.LoggingArgs> {
    override fun toJava(): com.pulumi.aws.redshift.LoggingArgs =
        com.pulumi.aws.redshift.LoggingArgs.builder()
            .bucketName(bucketName?.applyValue({ args0 -> args0 }))
            .clusterIdentifier(clusterIdentifier?.applyValue({ args0 -> args0 }))
            .logDestinationType(logDestinationType?.applyValue({ args0 -> args0 }))
            .logExports(logExports?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .s3KeyPrefix(s3KeyPrefix?.applyValue({ args0 -> args0 })).build()
}

/**
 * Builder for [LoggingArgs].
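 *
 * The suspend setters below are normally called inside the resource DSL's `args { ... }`
 * block rather than on a directly constructed builder (the constructor is internal).
 * A rough sketch, assuming the generated `logging` resource function:
 * ```kotlin
 * logging("example") {
 *     args {
 *         clusterIdentifier("example-cluster")   // String overload
 *         logDestinationType("cloudwatch")
 *         logExports("connectionlog", "userlog") // vararg String overload
 *     }
 * }
 * ```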
 */
@PulumiTagMarker
public class LoggingArgsBuilder internal constructor() {
    private var bucketName: Output<String>? = null

    private var clusterIdentifier: Output<String>? = null

    private var logDestinationType: Output<String>? = null

    private var logExports: Output<List<String>>? = null

    private var s3KeyPrefix: Output<String>? = null

    /**
     * @param value Name of an existing S3 bucket where the log files are to be stored. Required when `log_destination_type` is `s3`. The bucket must be in the same region as the cluster, and the cluster must have permission to read the bucket and put objects into it. For more information on the required bucket permissions, see the AWS [documentation](http://docs.aws.amazon.com/redshift/latest/mgmt/db-auditing.html#db-auditing-enable-logging).
     */
    @JvmName("pmuxhrvbhqcsdtpf")
    public suspend fun bucketName(`value`: Output<String>) {
        this.bucketName = value
    }

    /**
     * @param value Identifier of the source cluster. All other arguments are optional.
     */
    @JvmName("qsdypiuhsnerqhax")
    public suspend fun clusterIdentifier(`value`: Output<String>) {
        this.clusterIdentifier = value
    }

    /**
     * @param value Log destination type. Valid values are `s3` and `cloudwatch`.
     */
    @JvmName("khkimnefessgegbh")
    public suspend fun logDestinationType(`value`: Output<String>) {
        this.logDestinationType = value
    }

    /**
     * @param value Collection of exported log types. Required when `log_destination_type` is `cloudwatch`. Valid values are `connectionlog`, `useractivitylog`, and `userlog`.
     */
    @JvmName("fxabadltjspognbx")
    public suspend fun logExports(`value`: Output<List<String>>) {
        this.logExports = value
    }

    @JvmName("macldwcoyksbcrjc")
    public suspend fun logExports(vararg values: Output) {
        this.logExports = Output.all(values.asList())
    }

    /**
     * @param values Collection of exported log types. Required when `log_destination_type` is `cloudwatch`. Valid values are `connectionlog`, `useractivitylog`, and `userlog`.
     */
    @JvmName("yhwhgkwmogvfxnpb")
    public suspend fun logExports(values: List<Output<String>>) {
        this.logExports = Output.all(values)
    }

    /**
     * @param value Prefix applied to the log file names.
     */
    @JvmName("mlmduheyvwdpvigs")
    public suspend fun s3KeyPrefix(`value`: Output<String>) {
        this.s3KeyPrefix = value
    }

    /**
     * @param value Name of an existing S3 bucket where the log files are to be stored. Required when `log_destination_type` is `s3`. The bucket must be in the same region as the cluster, and the cluster must have permission to read the bucket and put objects into it. For more information on the required bucket permissions, see the AWS [documentation](http://docs.aws.amazon.com/redshift/latest/mgmt/db-auditing.html#db-auditing-enable-logging).
     */
    @JvmName("aeinltafnmkqcsoj")
    public suspend fun bucketName(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.bucketName = mapped
    }

    /**
     * @param value Identifier of the source cluster. All other arguments are optional.
     */
    @JvmName("cnjutrdhpkwjylss")
    public suspend fun clusterIdentifier(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.clusterIdentifier = mapped
    }

    /**
     * @param value Log destination type. Valid values are `s3` and `cloudwatch`.
     */
    @JvmName("njoeqcjdogrnxlae")
    public suspend fun logDestinationType(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.logDestinationType = mapped
    }

    /**
     * @param value Collection of exported log types. Required when `log_destination_type` is `cloudwatch`. Valid values are `connectionlog`, `useractivitylog`, and `userlog`.
     */
    @JvmName("ufkapdietrllxuof")
    public suspend fun logExports(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.logExports = mapped
    }

    /**
     * @param values Collection of exported log types. Required when `log_destination_type` is `cloudwatch`. Valid values are `connectionlog`, `useractivitylog`, and `userlog`.
     */
    @JvmName("jxjfntmcpfsuxqpi")
    public suspend fun logExports(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.logExports = mapped
    }

    /**
     * @param value Prefix applied to the log file names.
     */
    @JvmName("kevcttxsqcgvcvnm")
    public suspend fun s3KeyPrefix(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.s3KeyPrefix = mapped
    }

    internal fun build(): LoggingArgs = LoggingArgs(
        bucketName = bucketName,
        clusterIdentifier = clusterIdentifier,
        logDestinationType = logDestinationType,
        logExports = logExports,
        s3KeyPrefix = s3KeyPrefix,
    )
}



