All Downloads are FREE. Search and download functionalities are using the official Maven repository.

com.pulumi.azure.hdinsight.kotlin.inputs.SparkClusterStorageAccountArgs.kt Maven / Gradle / Ivy

Go to download

Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.

There is a newer version: 6.14.0.0
Show newest version
@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.azure.hdinsight.kotlin.inputs

import com.pulumi.azure.hdinsight.inputs.SparkClusterStorageAccountArgs.builder
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiNullFieldException
import com.pulumi.kotlin.PulumiTagMarker
import kotlin.Boolean
import kotlin.String
import kotlin.Suppress
import kotlin.jvm.JvmName

/**
 * Storage Account block for an HDInsight Spark Cluster.
 *
 * @property isDefault Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
 * > **NOTE:** One of the `storage_account` or `storage_account_gen2` blocks must be marked as the default.
 * @property storageAccountKey The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
 * @property storageContainerId The ID of the Storage Container. Changing this forces a new resource to be created.
 * > **NOTE:** This can be obtained from the `id` of the `azure.storage.Container` resource.
 * @property storageResourceId The ID of the Storage Account. Changing this forces a new resource to be created.
 */
public data class SparkClusterStorageAccountArgs(
    public val isDefault: Output<Boolean>,
    public val storageAccountKey: Output<String>,
    public val storageContainerId: Output<String>,
    // Optional: only storageResourceId may be omitted; the other three are required.
    public val storageResourceId: Output<String>? = null,
) : ConvertibleToJava<com.pulumi.azure.hdinsight.inputs.SparkClusterStorageAccountArgs> {
    /**
     * Converts this Kotlin wrapper into the underlying Java SDK args object
     * by copying each [Output] field into the Java builder unchanged.
     */
    override fun toJava(): com.pulumi.azure.hdinsight.inputs.SparkClusterStorageAccountArgs =
        com.pulumi.azure.hdinsight.inputs.SparkClusterStorageAccountArgs.builder()
            .isDefault(isDefault.applyValue({ args0 -> args0 }))
            .storageAccountKey(storageAccountKey.applyValue({ args0 -> args0 }))
            .storageContainerId(storageContainerId.applyValue({ args0 -> args0 }))
            .storageResourceId(storageResourceId?.applyValue({ args0 -> args0 })).build()
}

/**
 * Builder for [SparkClusterStorageAccountArgs].
 *
 * Each property has two setter overloads: one accepting an already-wrapped
 * [Output], and one accepting a plain scalar that is wrapped via [of].
 * The random `@JvmName` values disambiguate the suspend overloads on the JVM.
 */
@PulumiTagMarker
public class SparkClusterStorageAccountArgsBuilder internal constructor() {
    private var isDefault: Output<Boolean>? = null

    private var storageAccountKey: Output<String>? = null

    private var storageContainerId: Output<String>? = null

    private var storageResourceId: Output<String>? = null

    /**
     * @param value Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
     * > **NOTE:** One of the `storage_account` or `storage_account_gen2` blocks must be marked as the default.
     */
    @JvmName("lantxvqedxeubyri")
    public suspend fun isDefault(`value`: Output<Boolean>) {
        this.isDefault = value
    }

    /**
     * @param value The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
     */
    @JvmName("rarrskcypohbennl")
    public suspend fun storageAccountKey(`value`: Output<String>) {
        this.storageAccountKey = value
    }

    /**
     * @param value The ID of the Storage Container. Changing this forces a new resource to be created.
     * > **NOTE:** This can be obtained from the `id` of the `azure.storage.Container` resource.
     */
    @JvmName("dneewiegavcljipl")
    public suspend fun storageContainerId(`value`: Output<String>) {
        this.storageContainerId = value
    }

    /**
     * @param value The ID of the Storage Account. Changing this forces a new resource to be created.
     */
    @JvmName("jtqcellbndhxmqcx")
    public suspend fun storageResourceId(`value`: Output<String>) {
        this.storageResourceId = value
    }

    /**
     * @param value Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
     * > **NOTE:** One of the `storage_account` or `storage_account_gen2` blocks must be marked as the default.
     */
    @JvmName("uarrcakjgoascakx")
    public suspend fun isDefault(`value`: Boolean) {
        val toBeMapped = value
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.isDefault = mapped
    }

    /**
     * @param value The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
     */
    @JvmName("ggynbuphqtvmitbn")
    public suspend fun storageAccountKey(`value`: String) {
        val toBeMapped = value
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.storageAccountKey = mapped
    }

    /**
     * @param value The ID of the Storage Container. Changing this forces a new resource to be created.
     * > **NOTE:** This can be obtained from the `id` of the `azure.storage.Container` resource.
     */
    @JvmName("mmachrvmgpcwttbj")
    public suspend fun storageContainerId(`value`: String) {
        val toBeMapped = value
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.storageContainerId = mapped
    }

    /**
     * @param value The ID of the Storage Account. Changing this forces a new resource to be created.
     */
    @JvmName("fjmdattjfiujpwqc")
    public suspend fun storageResourceId(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.storageResourceId = mapped
    }

    /**
     * Assembles the final [SparkClusterStorageAccountArgs].
     *
     * @throws PulumiNullFieldException if any of the three required fields
     *         (`isDefault`, `storageAccountKey`, `storageContainerId`) was never set.
     */
    internal fun build(): SparkClusterStorageAccountArgs = SparkClusterStorageAccountArgs(
        isDefault = isDefault ?: throw PulumiNullFieldException("isDefault"),
        storageAccountKey = storageAccountKey ?: throw PulumiNullFieldException("storageAccountKey"),
        storageContainerId = storageContainerId ?: throw PulumiNullFieldException("storageContainerId"),
        storageResourceId = storageResourceId,
    )
}




© 2015 - 2025 Weber Informatics LLC | Privacy Policy