com.pulumi.azure.hdinsight.kotlin.inputs.SparkClusterRolesWorkerNodeAutoscaleCapacityArgs.kt
Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.
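To pull the library into a build, add it as a dependency. A minimal Gradle (Kotlin DSL) sketch follows; the artifact name comes from this page, while the group id and version are placeholders that should be taken from the published coordinates:

dependencies {
    // <group-id> and <version> are placeholders -- use the coordinates published for pulumi-azure-kotlin
    implementation("<group-id>:pulumi-azure-kotlin:<version>")
}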
@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.azure.hdinsight.kotlin.inputs
import com.pulumi.azure.hdinsight.inputs.SparkClusterRolesWorkerNodeAutoscaleCapacityArgs.builder
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiNullFieldException
import com.pulumi.kotlin.PulumiTagMarker
import kotlin.Int
import kotlin.Suppress
import kotlin.jvm.JvmName
/**
*
* @property maxInstanceCount The maximum number of worker nodes to autoscale to based on the cluster's activity.
* @property minInstanceCount The minimum number of worker nodes to autoscale to based on the cluster's activity.
*/
public data class SparkClusterRolesWorkerNodeAutoscaleCapacityArgs(
    public val maxInstanceCount: Output<Int>,
    public val minInstanceCount: Output<Int>,
) :
    ConvertibleToJava<com.pulumi.azure.hdinsight.inputs.SparkClusterRolesWorkerNodeAutoscaleCapacityArgs> {
    override fun toJava():
        com.pulumi.azure.hdinsight.inputs.SparkClusterRolesWorkerNodeAutoscaleCapacityArgs =
        com.pulumi.azure.hdinsight.inputs.SparkClusterRolesWorkerNodeAutoscaleCapacityArgs.builder()
            .maxInstanceCount(maxInstanceCount.applyValue({ args0 -> args0 }))
            .minInstanceCount(minInstanceCount.applyValue({ args0 -> args0 })).build()
}
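// Illustrative sketch, not part of the generated file: constructing the args type directly with
// Output.of, which is what the type-safe builder below ultimately produces. The values
// (3 worker nodes max, 1 min) are arbitrary example numbers.
private fun exampleAutoscaleCapacity(): SparkClusterRolesWorkerNodeAutoscaleCapacityArgs =
    SparkClusterRolesWorkerNodeAutoscaleCapacityArgs(
        maxInstanceCount = Output.of(3),
        minInstanceCount = Output.of(1),
    )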
/**
* Builder for [SparkClusterRolesWorkerNodeAutoscaleCapacityArgs].
*/
@PulumiTagMarker
public class SparkClusterRolesWorkerNodeAutoscaleCapacityArgsBuilder internal constructor() {
    private var maxInstanceCount: Output<Int>? = null
    private var minInstanceCount: Output<Int>? = null
    /**
     * @param value The maximum number of worker nodes to autoscale to based on the cluster's activity.
     */
    @JvmName("ejcihdrrrvrolbdh")
    public suspend fun maxInstanceCount(`value`: Output<Int>) {
        this.maxInstanceCount = value
    }
    /**
     * @param value The minimum number of worker nodes to autoscale to based on the cluster's activity.
     */
    @JvmName("vjuwawjcoqnjqrrk")
    public suspend fun minInstanceCount(`value`: Output<Int>) {
        this.minInstanceCount = value
    }
    /**
     * @param value The maximum number of worker nodes to autoscale to based on the cluster's activity.
     */
    @JvmName("leaemafolftfsxdf")
    public suspend fun maxInstanceCount(`value`: Int) {
        val toBeMapped = value
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.maxInstanceCount = mapped
    }
    /**
     * @param value The minimum number of worker nodes to autoscale to based on the cluster's activity.
     */
    @JvmName("vsvifstrqpbwkeon")
    public suspend fun minInstanceCount(`value`: Int) {
        val toBeMapped = value
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.minInstanceCount = mapped
    }
    internal fun build(): SparkClusterRolesWorkerNodeAutoscaleCapacityArgs =
        SparkClusterRolesWorkerNodeAutoscaleCapacityArgs(
            maxInstanceCount = maxInstanceCount ?: throw PulumiNullFieldException("maxInstanceCount"),
            minInstanceCount = minInstanceCount ?: throw PulumiNullFieldException("minInstanceCount"),
        )
}
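// Sketch of how an enclosing builder typically consumes this builder (assumption: this mirrors
// the pattern used by the generated autoscale-level builder, which lives in another file and is
// not shown here). A suspending lambda with receiver configures the builder, then build()
// enforces that both required fields were set, throwing PulumiNullFieldException otherwise.
internal suspend fun buildCapacityExample(
    block: suspend SparkClusterRolesWorkerNodeAutoscaleCapacityArgsBuilder.() -> Unit,
): SparkClusterRolesWorkerNodeAutoscaleCapacityArgs {
    val builder = SparkClusterRolesWorkerNodeAutoscaleCapacityArgsBuilder()
    builder.block()
    return builder.build()
}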