@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.gcp.dataproc.kotlin.inputs

import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs.builder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName

/**
 *
 * @property imageVersion The version of software inside the cluster. It must be one of the supported [Dataproc Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions), such as "1.2" (including a subminor version, such as "1.2.29"), or the ["preview" version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). If unspecified, it defaults to the latest Debian version.
 * @property optionalComponents The set of components to activate on the cluster.
 * @property properties The properties to set on daemon config files.
 * Property keys are specified in `prefix:property` format, for example `core:hadoop.tmp.dir`. The following are supported prefixes and their mappings:
 * * capacity-scheduler: `capacity-scheduler.xml`
 * * core: `core-site.xml`
 * * distcp: `distcp-default.xml`
 * * hdfs: `hdfs-site.xml`
 * * hive: `hive-site.xml`
 * * mapred: `mapred-site.xml`
 * * pig: `pig.properties`
 * * spark: `spark-defaults.conf`
 * * yarn: `yarn-site.xml`
 * For more information, see [Cluster properties](https://cloud.google.com/dataproc/docs/concepts/cluster-properties).
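 *
 * A minimal construction sketch; the image version, component names, and property values below
 * are illustrative assumptions, not defaults:
 * ```kotlin
 * import com.pulumi.core.Output
 *
 * val softwareConfig = WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs(
 *     imageVersion = Output.of("2.1"),
 *     optionalComponents = Output.of(listOf("ZOOKEEPER", "DOCKER")),
 *     properties = Output.of(
 *         mapOf(
 *             "core:hadoop.tmp.dir" to "/tmp/hadoop",
 *             "spark:spark.executor.memory" to "4g",
 *         ),
 *     ),
 * )
 * ```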
 */
public data class WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs(
    public val imageVersion: Output<String>? = null,
    public val optionalComponents: Output<List<String>>? = null,
    public val properties: Output<Map<String, String>>? = null,
) :
    ConvertibleToJava<com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs> {
    override fun toJava(): com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs =
        com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs.builder()
            .imageVersion(imageVersion?.applyValue({ args0 -> args0 }))
            .optionalComponents(optionalComponents?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .properties(
                properties?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            ).build()
}

/**
 * Builder for [WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs].
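 *
 * A minimal usage sketch, assuming this builder is reached through a generated
 * `softwareConfig { }` DSL block on the enclosing managed cluster config builder; that entry
 * point, plus the version and component names shown, are assumptions for illustration:
 * ```kotlin
 * softwareConfig {
 *     imageVersion("2.1")
 *     optionalComponents("ZOOKEEPER", "DOCKER")
 *     properties("core:hadoop.tmp.dir" to "/tmp/hadoop")
 * }
 * ```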
 */
@PulumiTagMarker
public class WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgsBuilder internal constructor() {
    private var imageVersion: Output<String>? = null

    private var optionalComponents: Output<List<String>>? = null

    private var properties: Output<Map<String, String>>? = null

    /**
     * @param value The version of software inside the cluster. It must be one of the supported [Dataproc Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions), such as "1.2" (including a subminor version, such as "1.2.29"), or the ["preview" version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). If unspecified, it defaults to the latest Debian version.
     */
    @JvmName("suqtdjkpkwceiilt")
    public suspend fun imageVersion(`value`: Output<String>) {
        this.imageVersion = value
    }

    /**
     * @param value The set of components to activate on the cluster.
     */
    @JvmName("ofmggiqhugxentma")
    public suspend fun optionalComponents(`value`: Output<List<String>>) {
        this.optionalComponents = value
    }

    @JvmName("qiryiiksxpsqeshf")
    public suspend fun optionalComponents(vararg values: Output) {
        this.optionalComponents = Output.all(values.asList())
    }

    /**
     * @param values The set of components to activate on the cluster.
     */
    @JvmName("taaucsgpkesqgeil")
    public suspend fun optionalComponents(values: List<Output<String>>) {
        this.optionalComponents = Output.all(values)
    }

    /**
     * @param value The properties to set on daemon config files.
     * Property keys are specified in `prefix:property` format, for example `core:hadoop.tmp.dir`. The following are supported prefixes and their mappings:
     * * capacity-scheduler: `capacity-scheduler.xml`
     * * core: `core-site.xml`
     * * distcp: `distcp-default.xml`
     * * hdfs: `hdfs-site.xml`
     * * hive: `hive-site.xml`
     * * mapred: `mapred-site.xml`
     * * pig: `pig.properties`
     * * spark: `spark-defaults.conf`
     * * yarn: `yarn-site.xml`
     * For more information, see [Cluster properties](https://cloud.google.com/dataproc/docs/concepts/cluster-properties).
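     *
     * A short sketch of this `Output`-typed overload; the YARN property and value are
     * illustrative assumptions:
     * ```kotlin
     * properties(Output.of(mapOf("yarn:yarn.nodemanager.resource.memory-mb" to "8192")))
     * ```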
     */
    @JvmName("shkqofyilqgynawa")
    public suspend fun properties(`value`: Output<Map<String, String>>) {
        this.properties = value
    }

    /**
     * @param value The version of software inside the cluster. It must be one of the supported [Dataproc Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions), such as "1.2" (including a subminor version, such as "1.2.29"), or the ["preview" version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). If unspecified, it defaults to the latest Debian version.
     */
    @JvmName("olhvvfkeomoemkjr")
    public suspend fun imageVersion(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.imageVersion = mapped
    }

    /**
     * @param value The set of components to activate on the cluster.
     */
    @JvmName("ygdkoinqnthygyue")
    public suspend fun optionalComponents(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.optionalComponents = mapped
    }

    /**
     * @param values The set of components to activate on the cluster.
     */
    @JvmName("dglkooncmsolhxdl")
    public suspend fun optionalComponents(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.optionalComponents = mapped
    }

    /**
     * @param value The properties to set on daemon config files.
     * Property keys are specified in `prefix:property` format, for example `core:hadoop.tmp.dir`. The following are supported prefixes and their mappings:
     * * capacity-scheduler: `capacity-scheduler.xml`
     * * core: `core-site.xml`
     * * distcp: `distcp-default.xml`
     * * hdfs: `hdfs-site.xml`
     * * hive: `hive-site.xml`
     * * mapred: `mapred-site.xml`
     * * pig: `pig.properties`
     * * spark: `spark-defaults.conf`
     * * yarn: `yarn-site.xml`
     * For more information, see [Cluster properties](https://cloud.google.com/dataproc/docs/concepts/cluster-properties).
     */
    @JvmName("kbasscvrbadukcdt")
    public suspend fun properties(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.properties = mapped
    }

    /**
     * @param values The properties to set on daemon config files.
     * Property keys are specified in `prefix:property` format, for example `core:hadoop.tmp.dir`. The following are supported prefixes and their mappings:
     * * capacity-scheduler: `capacity-scheduler.xml`
     * * core: `core-site.xml`
     * * distcp: `distcp-default.xml`
     * * hdfs: `hdfs-site.xml`
     * * hive: `hive-site.xml`
     * * mapred: `mapred-site.xml`
     * * pig: `pig.properties`
     * * spark: `spark-defaults.conf`
     * * yarn: `yarn-site.xml`
     * For more information, see [Cluster properties](https://cloud.google.com/dataproc/docs/concepts/cluster-properties).
     */
    @JvmName("kxobcbygngwkchhg")
    public fun properties(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.properties = mapped
    }

    internal fun build(): WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs =
        WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs(
            imageVersion = imageVersion,
            optionalComponents = optionalComponents,
            properties = properties,
        )
}