// com.pulumi.gcp.dataproc.kotlin.inputs.BatchEnvironmentConfigPeripheralsConfigArgs.kt
// Generated source from the pulumi-gcp-kotlin SDK: build cloud applications and
// infrastructure by combining the safety and reliability of infrastructure as code
// with the power of the Kotlin programming language.
@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.gcp.dataproc.kotlin.inputs
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.dataproc.inputs.BatchEnvironmentConfigPeripheralsConfigArgs.builder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.jvm.JvmName
/**
*
* @property metastoreService Resource name of an existing Dataproc Metastore service.
* @property sparkHistoryServerConfig The Spark History Server configuration for the workload.
* Structure is documented below.
*/
public data class BatchEnvironmentConfigPeripheralsConfigArgs(
    public val metastoreService: Output<String>? = null,
    public val sparkHistoryServerConfig: Output<BatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs>? = null,
) : ConvertibleToJava<com.pulumi.gcp.dataproc.inputs.BatchEnvironmentConfigPeripheralsConfigArgs> {
    override fun toJava(): com.pulumi.gcp.dataproc.inputs.BatchEnvironmentConfigPeripheralsConfigArgs =
        com.pulumi.gcp.dataproc.inputs.BatchEnvironmentConfigPeripheralsConfigArgs.builder()
            .metastoreService(metastoreService?.applyValue({ args0 -> args0 }))
            .sparkHistoryServerConfig(
                sparkHistoryServerConfig?.applyValue({ args0 ->
                    args0.let({ args0 ->
                        args0.toJava()
                    })
                }),
            ).build()
}
/**
* Builder for [BatchEnvironmentConfigPeripheralsConfigArgs].
*/
@PulumiTagMarker
public class BatchEnvironmentConfigPeripheralsConfigArgsBuilder internal constructor() {
    private var metastoreService: Output<String>? = null

    private var sparkHistoryServerConfig:
        Output<BatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs>? = null

    /**
     * @param value Resource name of an existing Dataproc Metastore service.
     */
    @JvmName("hbegavvilehgdqjm")
    public suspend fun metastoreService(`value`: Output<String>) {
        this.metastoreService = value
    }
    /**
     * @param value The Spark History Server configuration for the workload.
     * Structure is documented below.
     */
    @JvmName("rgvohishvgrkmhhd")
    public suspend fun sparkHistoryServerConfig(`value`: Output<BatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs>) {
        this.sparkHistoryServerConfig = value
    }
    /**
     * @param value Resource name of an existing Dataproc Metastore service.
     */
    @JvmName("cckhoxmcybnkkapf")
    public suspend fun metastoreService(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.metastoreService = mapped
    }
    /**
     * @param value The Spark History Server configuration for the workload.
     * Structure is documented below.
     */
    @JvmName("ssqjoqiisubnnqhj")
    public suspend fun sparkHistoryServerConfig(`value`: BatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.sparkHistoryServerConfig = mapped
    }
    /**
     * @param argument The Spark History Server configuration for the workload.
     * Structure is documented below.
     */
    @JvmName("lilscnejiwpvjubg")
    public suspend fun sparkHistoryServerConfig(argument: suspend BatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgsBuilder.() -> Unit) {
        val toBeMapped =
            BatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgsBuilder().applySuspend {
                argument()
            }.build()
        val mapped = of(toBeMapped)
        this.sparkHistoryServerConfig = mapped
    }
    internal fun build(): BatchEnvironmentConfigPeripheralsConfigArgs =
        BatchEnvironmentConfigPeripheralsConfigArgs(
            metastoreService = metastoreService,
            sparkHistoryServerConfig = sparkHistoryServerConfig,
        )
}
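/*
 * A minimal usage sketch (illustrative only): the @PulumiTagMarker builder above is
 * normally driven through the Kotlin SDK's type-safe DSL rather than instantiated
 * directly, nested under a Dataproc Batch resource's environmentConfig block. The
 * project, region, metastore, and Spark History Server cluster names below are
 * hypothetical placeholders, and `dataprocCluster` is assumed to be the field
 * exposed by the nested Spark History Server config builder.
 *
 *     val example = batch("example-batch") {
 *         args {
 *             environmentConfig {
 *                 peripheralsConfig {
 *                     metastoreService("projects/my-project/locations/us-central1/services/my-metastore")
 *                     sparkHistoryServerConfig {
 *                         dataprocCluster("projects/my-project/regions/us-central1/clusters/my-phs-cluster")
 *                     }
 *                 }
 *             }
 *         }
 *     }
 */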