com.pulumi.azure.datafactory.kotlin.inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs.kt
@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.azure.datafactory.kotlin.inputs

import com.pulumi.azure.datafactory.inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs.builder
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiNullFieldException
import com.pulumi.kotlin.PulumiTagMarker
import kotlin.Int
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName

/**
 *
 * @property clusterVersion Spark version of the cluster.
 * @property customTags Tags for the cluster resource.
 * @property driverNodeType Driver node type for the cluster.
 * @property initScripts User-defined initialization scripts for the cluster.
 * @property logDestination Location to deliver Spark driver, worker, and event logs.
 * @property maxNumberOfWorkers Specifies the maximum number of worker nodes. It should be between 1 and 25000.
 * @property minNumberOfWorkers Specifies the minimum number of worker nodes. It should be between 1 and 25000. It defaults to `1`.
 * @property nodeType Node type for the new cluster.
 * @property sparkConfig User-specified Spark configuration variables key-value pairs.
 * @property sparkEnvironmentVariables User-specified Spark environment variables key-value pairs.
 */
public data class LinkedServiceAzureDatabricksNewClusterConfigArgs(
    public val clusterVersion: Output<String>,
    public val customTags: Output<Map<String, String>>? = null,
    public val driverNodeType: Output<String>? = null,
    public val initScripts: Output<List<String>>? = null,
    public val logDestination: Output<String>? = null,
    public val maxNumberOfWorkers: Output<Int>? = null,
    public val minNumberOfWorkers: Output<Int>? = null,
    public val nodeType: Output<String>,
    public val sparkConfig: Output<Map<String, String>>? = null,
    public val sparkEnvironmentVariables: Output<Map<String, String>>? = null,
) :
    ConvertibleToJava<com.pulumi.azure.datafactory.inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs> {
    override fun toJava():
        com.pulumi.azure.datafactory.inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs =
        com.pulumi.azure.datafactory.inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs.builder()
            .clusterVersion(clusterVersion.applyValue({ args0 -> args0 }))
            .customTags(
                customTags?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            )
            .driverNodeType(driverNodeType?.applyValue({ args0 -> args0 }))
            .initScripts(initScripts?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .logDestination(logDestination?.applyValue({ args0 -> args0 }))
            .maxNumberOfWorkers(maxNumberOfWorkers?.applyValue({ args0 -> args0 }))
            .minNumberOfWorkers(minNumberOfWorkers?.applyValue({ args0 -> args0 }))
            .nodeType(nodeType.applyValue({ args0 -> args0 }))
            .sparkConfig(
                sparkConfig?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            )
            .sparkEnvironmentVariables(
                sparkEnvironmentVariables?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            ).build()
}
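
// Usage sketch (illustrative): the args can also be constructed directly. Only
// clusterVersion and nodeType are required; all other parameters are nullable
// and default to null. The version string, node type, and tag values below are
// placeholders, not recommendations.
//
//     val newClusterConfig = LinkedServiceAzureDatabricksNewClusterConfigArgs(
//         clusterVersion = Output.of("14.3.x-scala2.12"),
//         nodeType = Output.of("Standard_DS3_v2"),
//         minNumberOfWorkers = Output.of(1),
//         maxNumberOfWorkers = Output.of(5),
//         customTags = Output.of(mapOf("createdBy" to "pulumi")),
//     )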

/**
 * Builder for [LinkedServiceAzureDatabricksNewClusterConfigArgs].
 */
@PulumiTagMarker
public class LinkedServiceAzureDatabricksNewClusterConfigArgsBuilder internal constructor() {
    private var clusterVersion: Output<String>? = null

    private var customTags: Output<Map<String, String>>? = null

    private var driverNodeType: Output<String>? = null

    private var initScripts: Output<List<String>>? = null

    private var logDestination: Output<String>? = null

    private var maxNumberOfWorkers: Output<Int>? = null

    private var minNumberOfWorkers: Output<Int>? = null

    private var nodeType: Output<String>? = null

    private var sparkConfig: Output<Map<String, String>>? = null

    private var sparkEnvironmentVariables: Output<Map<String, String>>? = null

    /**
     * @param value Spark version of the cluster.
     */
    @JvmName("brqnvhcfsroeqbut")
    public suspend fun clusterVersion(`value`: Output<String>) {
        this.clusterVersion = value
    }

    /**
     * @param value Tags for the cluster resource.
     */
    @JvmName("hftfnvhajotoeibg")
    public suspend fun customTags(`value`: Output<Map<String, String>>) {
        this.customTags = value
    }

    /**
     * @param value Driver node type for the cluster.
     */
    @JvmName("fxnlgxrqlnubamlp")
    public suspend fun driverNodeType(`value`: Output<String>) {
        this.driverNodeType = value
    }

    /**
     * @param value User-defined initialization scripts for the cluster.
     */
    @JvmName("adcqxpwbryewpqce")
    public suspend fun initScripts(`value`: Output<List<String>>) {
        this.initScripts = value
    }

    @JvmName("nvlekmdswqqfgfqp")
    public suspend fun initScripts(vararg values: Output) {
        this.initScripts = Output.all(values.asList())
    }

    /**
     * @param values User-defined initialization scripts for the cluster.
     */
    @JvmName("fohhrkplrbfxclmn")
    public suspend fun initScripts(values: List<Output<String>>) {
        this.initScripts = Output.all(values)
    }

    /**
     * @param value Location to deliver Spark driver, worker, and event logs.
     */
    @JvmName("svmyresltakvjref")
    public suspend fun logDestination(`value`: Output<String>) {
        this.logDestination = value
    }

    /**
     * @param value Specifies the maximum number of worker nodes. It should be between 1 and 25000.
     */
    @JvmName("xknwpsdefwpupmfw")
    public suspend fun maxNumberOfWorkers(`value`: Output<Int>) {
        this.maxNumberOfWorkers = value
    }

    /**
     * @param value Specifies the minimum number of worker nodes. It should be between 1 and 25000. It defaults to `1`.
     */
    @JvmName("hfevvddcmtahyxcr")
    public suspend fun minNumberOfWorkers(`value`: Output<Int>) {
        this.minNumberOfWorkers = value
    }

    /**
     * @param value Node type for the new cluster.
     */
    @JvmName("wqubdvxahrswckts")
    public suspend fun nodeType(`value`: Output<String>) {
        this.nodeType = value
    }

    /**
     * @param value User-specified Spark configuration variables key-value pairs.
     */
    @JvmName("sciqcyesicsvbpjg")
    public suspend fun sparkConfig(`value`: Output<Map<String, String>>) {
        this.sparkConfig = value
    }

    /**
     * @param value User-specified Spark environment variables key-value pairs.
     */
    @JvmName("cjgpqrnjhcogyyju")
    public suspend fun sparkEnvironmentVariables(`value`: Output<Map<String, String>>) {
        this.sparkEnvironmentVariables = value
    }

    /**
     * @param value Spark version of the cluster.
     */
    @JvmName("jfvuvvqixmpfnnym")
    public suspend fun clusterVersion(`value`: String) {
        val toBeMapped = value
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.clusterVersion = mapped
    }

    /**
     * @param value Tags for the cluster resource.
     */
    @JvmName("iptpcstsdddvdatq")
    public suspend fun customTags(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.customTags = mapped
    }

    /**
     * @param values Tags for the cluster resource.
     */
    @JvmName("iffogeugelcxeabv")
    public fun customTags(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.customTags = mapped
    }

    /**
     * @param value Driver node type for the cluster.
     */
    @JvmName("kunnbhpbibdlchqs")
    public suspend fun driverNodeType(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.driverNodeType = mapped
    }

    /**
     * @param value User-defined initialization scripts for the cluster.
     */
    @JvmName("iskjpwrghtwehumf")
    public suspend fun initScripts(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.initScripts = mapped
    }

    /**
     * @param values User-defined initialization scripts for the cluster.
     */
    @JvmName("oonnijmmltaubeyr")
    public suspend fun initScripts(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.initScripts = mapped
    }

    /**
     * @param value Location to deliver Spark driver, worker, and event logs.
     */
    @JvmName("viqhfjgfaorcfsne")
    public suspend fun logDestination(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.logDestination = mapped
    }

    /**
     * @param value Specifies the maximum number of worker nodes. It should be between 1 and 25000.
     */
    @JvmName("gayrrgmvvmteicbl")
    public suspend fun maxNumberOfWorkers(`value`: Int?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.maxNumberOfWorkers = mapped
    }

    /**
     * @param value Specifies the minimum number of worker nodes. It should be between 1 and 25000. It defaults to `1`.
     */
    @JvmName("ajimvbohrbfxpfjh")
    public suspend fun minNumberOfWorkers(`value`: Int?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.minNumberOfWorkers = mapped
    }

    /**
     * @param value Node type for the new cluster.
     */
    @JvmName("uejtlhjjmwdjdaul")
    public suspend fun nodeType(`value`: String) {
        val toBeMapped = value
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.nodeType = mapped
    }

    /**
     * @param value User-specified Spark configuration variables key-value pairs.
     */
    @JvmName("rlmxegbrihcjfavn")
    public suspend fun sparkConfig(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.sparkConfig = mapped
    }

    /**
     * @param values User-specified Spark configuration variables key-value pairs.
     */
    @JvmName("akkwxrkqmcqaeurv")
    public fun sparkConfig(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.sparkConfig = mapped
    }

    /**
     * @param value User-specified Spark environment variables key-value pairs.
     */
    @JvmName("konckfcqymwdelbw")
    public suspend fun sparkEnvironmentVariables(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.sparkEnvironmentVariables = mapped
    }

    /**
     * @param values User-specified Spark environment variables key-value pairs.
     */
    @JvmName("pkwecocbpcbmawuk")
    public fun sparkEnvironmentVariables(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.sparkEnvironmentVariables = mapped
    }

    internal fun build(): LinkedServiceAzureDatabricksNewClusterConfigArgs =
        LinkedServiceAzureDatabricksNewClusterConfigArgs(
            clusterVersion = clusterVersion ?: throw PulumiNullFieldException("clusterVersion"),
            customTags = customTags,
            driverNodeType = driverNodeType,
            initScripts = initScripts,
            logDestination = logDestination,
            maxNumberOfWorkers = maxNumberOfWorkers,
            minNumberOfWorkers = minNumberOfWorkers,
            nodeType = nodeType ?: throw PulumiNullFieldException("nodeType"),
            sparkConfig = sparkConfig,
            sparkEnvironmentVariables = sparkEnvironmentVariables,
        )
}
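
For orientation, a minimal sketch of how this builder is typically consumed. In the Pulumi Kotlin SDK, generated builders like this one back type-safe DSL blocks rather than being constructed by hand (both the constructor and build() are internal). The linkedServiceAzureDatabricks resource function, the args { newClusterConfig { ... } } nesting, and every literal value below are assumptions based on that convention, not taken from this file.

import com.pulumi.azure.datafactory.kotlin.linkedServiceAzureDatabricks

suspend fun example() {
    linkedServiceAzureDatabricks("example") {
        args {
            newClusterConfig {
                // clusterVersion and nodeType are the two required fields;
                // build() throws PulumiNullFieldException if either is unset.
                clusterVersion("14.3.x-scala2.12") // placeholder runtime version
                nodeType("Standard_DS3_v2")        // placeholder node type
                minNumberOfWorkers(1)
                maxNumberOfWorkers(5)
            }
        }
    }
}

Note that every setter comes in paired overloads accepting either a plain value or an Output, so values produced by other resources can be passed through without unwrapping.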



