com.pulumi.azurenative.datafactory.kotlin.inputs.SynapseNotebookActivityArgs.kt
Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.
@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.azurenative.datafactory.kotlin.inputs
import com.pulumi.azurenative.datafactory.inputs.SynapseNotebookActivityArgs.builder
import com.pulumi.azurenative.datafactory.kotlin.enums.ActivityOnInactiveMarkAs
import com.pulumi.azurenative.datafactory.kotlin.enums.ActivityState
import com.pulumi.azurenative.datafactory.kotlin.enums.ConfigurationType
import com.pulumi.core.Either
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiNullFieldException
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Any
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName
/**
* Execute Synapse notebook activity.
* @property conf Spark configuration properties, which will override the 'conf' of the notebook you provide.
* @property configurationType The type of the spark config.
* @property dependsOn Activity depends on condition.
* @property description Activity description.
* @property driverSize Number of cores and amount of memory to be used for the driver allocated in the specified Spark pool for the session; this overrides 'driverCores' and 'driverMemory' of the notebook you provide. Type: string (or Expression with resultType string).
* @property executorSize Number of cores and amount of memory to be used for the executors allocated in the specified Spark pool for the session; this overrides 'executorCores' and 'executorMemory' of the notebook you provide. Type: string (or Expression with resultType string).
* @property linkedServiceName Linked service reference.
* @property name Activity name.
* @property notebook Synapse notebook reference.
* @property numExecutors Number of executors to launch for this session, which will override the 'numExecutors' of the notebook you provide. Type: integer (or Expression with resultType integer).
* @property onInactiveMarkAs Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default.
* @property parameters Notebook parameters.
* @property policy Activity policy.
* @property sparkConfig Spark configuration property.
* @property sparkPool The name of the big data pool which will be used to execute the notebook.
* @property state Activity state. This is an optional property and if not provided, the state will be Active by default.
* @property targetSparkConfiguration The spark configuration of the spark job.
* @property type Type of activity.
* Expected value is 'SynapseNotebook'.
* @property userProperties Activity user properties.
*/
public data class SynapseNotebookActivityArgs(
public val conf: Output<Any>? = null,
public val configurationType: Output<Either<String, ConfigurationType>>? = null,
public val dependsOn: Output<List<ActivityDependencyArgs>>? = null,
public val description: Output<String>? = null,
public val driverSize: Output<Any>? = null,
public val executorSize: Output<Any>? = null,
public val linkedServiceName: Output<LinkedServiceReferenceArgs>? = null,
public val name: Output<String>,
public val notebook: Output<SynapseNotebookReferenceArgs>,
public val numExecutors: Output<Any>? = null,
public val onInactiveMarkAs: Output<Either<String, ActivityOnInactiveMarkAs>>? = null,
public val parameters: Output<Map<String, NotebookParameterArgs>>? = null,
public val policy: Output<ActivityPolicyArgs>? = null,
public val sparkConfig: Output<Map<String, Any>>? = null,
public val sparkPool: Output<BigDataPoolParametrizationReferenceArgs>? = null,
public val state: Output<Either<String, ActivityState>>? = null,
public val targetSparkConfiguration: Output<SparkConfigurationParametrizationReferenceArgs>? = null,
public val type: Output<String>,
public val userProperties: Output<List<UserPropertyArgs>>? = null,
) : ConvertibleToJava<com.pulumi.azurenative.datafactory.inputs.SynapseNotebookActivityArgs> {
override fun toJava(): com.pulumi.azurenative.datafactory.inputs.SynapseNotebookActivityArgs =
com.pulumi.azurenative.datafactory.inputs.SynapseNotebookActivityArgs.builder()
.conf(conf?.applyValue({ args0 -> args0 }))
.configurationType(
configurationType?.applyValue({ args0 ->
args0.transform(
{ args0 -> args0 },
{ args0 -> args0.let({ args0 -> args0.toJava() }) },
)
}),
)
.dependsOn(
dependsOn?.applyValue({ args0 ->
args0.map({ args0 ->
args0.let({ args0 ->
args0.toJava()
})
})
}),
)
.description(description?.applyValue({ args0 -> args0 }))
.driverSize(driverSize?.applyValue({ args0 -> args0 }))
.executorSize(executorSize?.applyValue({ args0 -> args0 }))
.linkedServiceName(linkedServiceName?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
.name(name.applyValue({ args0 -> args0 }))
.notebook(notebook.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
.numExecutors(numExecutors?.applyValue({ args0 -> args0 }))
.onInactiveMarkAs(
onInactiveMarkAs?.applyValue({ args0 ->
args0.transform(
{ args0 -> args0 },
{ args0 -> args0.let({ args0 -> args0.toJava() }) },
)
}),
)
.parameters(
parameters?.applyValue({ args0 ->
args0.map({ args0 ->
args0.key.to(args0.value.let({ args0 -> args0.toJava() }))
}).toMap()
}),
)
.policy(policy?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
.sparkConfig(
sparkConfig?.applyValue({ args0 ->
args0.map({ args0 ->
args0.key.to(args0.value)
}).toMap()
}),
)
.sparkPool(sparkPool?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
.state(
state?.applyValue({ args0 ->
args0.transform({ args0 -> args0 }, { args0 ->
args0.let({ args0 ->
args0.toJava()
})
})
}),
)
.targetSparkConfiguration(
targetSparkConfiguration?.applyValue({ args0 ->
args0.let({ args0 ->
args0.toJava()
})
}),
)
.type(type.applyValue({ args0 -> args0 }))
.userProperties(
userProperties?.applyValue({ args0 ->
args0.map({ args0 ->
args0.let({ args0 ->
args0.toJava()
})
})
}),
).build()
}
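// A minimal usage sketch, not part of the generated file: how the type-safe
// builder defined below is typically driven through applySuspend. The activity
// and notebook names are hypothetical, and since the builder's constructor and
// build() are internal, this helper compiles only inside this module.
private suspend fun exampleSynapseNotebookActivity(): SynapseNotebookActivityArgs =
    SynapseNotebookActivityArgsBuilder().applySuspend {
        name("runDailyNotebook") // required; enforced in build()
        type("SynapseNotebook") // required; the only accepted value
        notebook {
            // Setter names below are assumed from the generator's conventions
            // for the sibling SynapseNotebookReferenceArgsBuilder.
            referenceName("myNotebook")
            type("NotebookReference")
        }
        numExecutors(2) // overrides the notebook's own 'numExecutors'
        state("Active") // optional; Active is the default anyway
    }.build()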
/**
* Builder for [SynapseNotebookActivityArgs].
*/
@PulumiTagMarker
public class SynapseNotebookActivityArgsBuilder internal constructor() {
private var conf: Output<Any>? = null
private var configurationType: Output<Either<String, ConfigurationType>>? = null
private var dependsOn: Output<List<ActivityDependencyArgs>>? = null
private var description: Output<String>? = null
private var driverSize: Output<Any>? = null
private var executorSize: Output<Any>? = null
private var linkedServiceName: Output<LinkedServiceReferenceArgs>? = null
private var name: Output<String>? = null
private var notebook: Output<SynapseNotebookReferenceArgs>? = null
private var numExecutors: Output<Any>? = null
private var onInactiveMarkAs: Output<Either<String, ActivityOnInactiveMarkAs>>? = null
private var parameters: Output<Map<String, NotebookParameterArgs>>? = null
private var policy: Output<ActivityPolicyArgs>? = null
private var sparkConfig: Output<Map<String, Any>>? = null
private var sparkPool: Output<BigDataPoolParametrizationReferenceArgs>? = null
private var state: Output<Either<String, ActivityState>>? = null
private var targetSparkConfiguration: Output<SparkConfigurationParametrizationReferenceArgs>? = null
private var type: Output<String>? = null
private var userProperties: Output<List<UserPropertyArgs>>? = null
/**
* @param value Spark configuration properties, which will override the 'conf' of the notebook you provide.
*/
@JvmName("weguamqeyfjvwqxk")
public suspend fun conf(`value`: Output<Any>) {
this.conf = value
}
/**
* @param value The type of the spark config.
*/
@JvmName("qlqnaxptxnnrbikl")
public suspend fun configurationType(`value`: Output<Either<String, ConfigurationType>>) {
this.configurationType = value
}
/**
* @param value Activity depends on condition.
*/
@JvmName("htdxwlfkognogijw")
public suspend fun dependsOn(`value`: Output<List<ActivityDependencyArgs>>) {
this.dependsOn = value
}
@JvmName("jkduryolbqqlwxlt")
public suspend fun dependsOn(vararg values: Output<ActivityDependencyArgs>) {
this.dependsOn = Output.all(values.asList())
}
/**
* @param values Activity depends on condition.
*/
@JvmName("vxjlxcdfbwuewfcq")
public suspend fun dependsOn(values: List<Output<ActivityDependencyArgs>>) {
this.dependsOn = Output.all(values)
}
/**
* @param value Activity description.
*/
@JvmName("xumwimsdegbcjrvw")
public suspend fun description(`value`: Output<String>) {
this.description = value
}
/**
* @param value Number of cores and amount of memory to be used for the driver allocated in the specified Spark pool for the session; this overrides 'driverCores' and 'driverMemory' of the notebook you provide. Type: string (or Expression with resultType string).
*/
@JvmName("xlkceuyebeurtlet")
public suspend fun driverSize(`value`: Output<Any>) {
this.driverSize = value
}
/**
* @param value Number of cores and amount of memory to be used for the executors allocated in the specified Spark pool for the session; this overrides 'executorCores' and 'executorMemory' of the notebook you provide. Type: string (or Expression with resultType string).
*/
@JvmName("spunpheqjhupetpy")
public suspend fun executorSize(`value`: Output<Any>) {
this.executorSize = value
}
/**
* @param value Linked service reference.
*/
@JvmName("kgvcjyxvmvfllswv")
public suspend fun linkedServiceName(`value`: Output<LinkedServiceReferenceArgs>) {
this.linkedServiceName = value
}
/**
* @param value Activity name.
*/
@JvmName("rtmtkulcqfhfdfnr")
public suspend fun name(`value`: Output<String>) {
this.name = value
}
/**
* @param value Synapse notebook reference.
*/
@JvmName("godoamkwqbwjhxgg")
public suspend fun notebook(`value`: Output<SynapseNotebookReferenceArgs>) {
this.notebook = value
}
/**
* @param value Number of executors to launch for this session, which will override the 'numExecutors' of the notebook you provide. Type: integer (or Expression with resultType integer).
*/
@JvmName("ermjesgdxhekmgao")
public suspend fun numExecutors(`value`: Output<Any>) {
this.numExecutors = value
}
/**
* @param value Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default.
*/
@JvmName("bmhckquymyhljxkt")
public suspend fun onInactiveMarkAs(`value`: Output<Either<String, ActivityOnInactiveMarkAs>>) {
this.onInactiveMarkAs = value
}
/**
* @param value Notebook parameters.
*/
@JvmName("bxlbaxntmoddrlpd")
public suspend fun parameters(`value`: Output<Map<String, NotebookParameterArgs>>) {
this.parameters = value
}
/**
* @param value Activity policy.
*/
@JvmName("ptmcnjnlnhjaaecm")
public suspend fun policy(`value`: Output<ActivityPolicyArgs>) {
this.policy = value
}
/**
* @param value Spark configuration property.
*/
@JvmName("nowrqynrcouvknit")
public suspend fun sparkConfig(`value`: Output<Map<String, Any>>) {
this.sparkConfig = value
}
/**
* @param value The name of the big data pool which will be used to execute the notebook.
*/
@JvmName("sofhmmtglmsevcab")
public suspend fun sparkPool(`value`: Output<BigDataPoolParametrizationReferenceArgs>) {
this.sparkPool = value
}
/**
* @param value Activity state. This is an optional property and if not provided, the state will be Active by default.
*/
@JvmName("jpwfgebdxqtwsivy")
public suspend fun state(`value`: Output<Either<String, ActivityState>>) {
this.state = value
}
/**
* @param value The spark configuration of the spark job.
*/
@JvmName("jvwrfpcyosjieebv")
public suspend fun targetSparkConfiguration(`value`: Output<SparkConfigurationParametrizationReferenceArgs>) {
this.targetSparkConfiguration = value
}
/**
* @param value Type of activity.
* Expected value is 'SynapseNotebook'.
*/
@JvmName("bnxscecwlaysxeqt")
public suspend fun type(`value`: Output<String>) {
this.type = value
}
/**
* @param value Activity user properties.
*/
@JvmName("oytuirdlaqxihmjp")
public suspend fun userProperties(`value`: Output<List<UserPropertyArgs>>) {
this.userProperties = value
}
@JvmName("lmoiupsdvoxufjxs")
public suspend fun userProperties(vararg values: Output<UserPropertyArgs>) {
this.userProperties = Output.all(values.asList())
}
/**
* @param values Activity user properties.
*/
@JvmName("wmukxytvqclokgyj")
public suspend fun userProperties(values: List<Output<UserPropertyArgs>>) {
this.userProperties = Output.all(values)
}
/**
* @param value Spark configuration properties, which will override the 'conf' of the notebook you provide.
*/
@JvmName("unckineiitpfuamd")
public suspend fun conf(`value`: Any?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.conf = mapped
}
/**
* @param value The type of the spark config.
*/
@JvmName("mrpwmlclefgulvms")
public suspend fun configurationType(`value`: Either<String, ConfigurationType>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.configurationType = mapped
}
/**
* @param value The type of the spark config.
*/
@JvmName("dxhuthpstjwyttyf")
public fun configurationType(`value`: String) {
val toBeMapped = Either.ofLeft(value)
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.configurationType = mapped
}
/**
* @param value The type of the spark config.
*/
@JvmName("gasqtwnkafbqkwgi")
public fun configurationType(`value`: ConfigurationType) {
val toBeMapped = Either.ofRight(value)
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.configurationType = mapped
}
/**
* @param value Activity depends on condition.
*/
@JvmName("accccwgqjihsdgxh")
public suspend fun dependsOn(`value`: List<ActivityDependencyArgs>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.dependsOn = mapped
}
/**
* @param argument Activity depends on condition.
*/
@JvmName("ustbrilonercpiwl")
public suspend fun dependsOn(argument: List<suspend ActivityDependencyArgsBuilder.() -> Unit>) {
val toBeMapped = argument.toList().map {
ActivityDependencyArgsBuilder().applySuspend {
it()
}.build()
}
val mapped = of(toBeMapped)
this.dependsOn = mapped
}
/**
* @param argument Activity depends on condition.
*/
@JvmName("otwppvxsuygevkvc")
public suspend fun dependsOn(vararg argument: suspend ActivityDependencyArgsBuilder.() -> Unit) {
val toBeMapped = argument.toList().map {
ActivityDependencyArgsBuilder().applySuspend {
it()
}.build()
}
val mapped = of(toBeMapped)
this.dependsOn = mapped
}
/**
* @param argument Activity depends on condition.
*/
@JvmName("lajjixydlyqfqjhs")
public suspend fun dependsOn(argument: suspend ActivityDependencyArgsBuilder.() -> Unit) {
val toBeMapped = listOf(ActivityDependencyArgsBuilder().applySuspend { argument() }.build())
val mapped = of(toBeMapped)
this.dependsOn = mapped
}
/**
* @param values Activity depends on condition.
*/
@JvmName("jlatjmibuyjnvtnm")
public suspend fun dependsOn(vararg values: ActivityDependencyArgs) {
val toBeMapped = values.toList()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.dependsOn = mapped
}
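// The overloads above form the generator's standard family for list-of-object
// fields: an Output-wrapped list, a plain nullable list, vararg Output values,
// vararg plain values, vararg sub-builder lambdas, and a single sub-builder
// lambda all assign the same backing field. userProperties repeats the same
// pattern further below.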
/**
* @param value Activity description.
*/
@JvmName("rtbhvguhjqsnbkav")
public suspend fun description(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.description = mapped
}
/**
* @param value Number of cores and amount of memory to be used for the driver allocated in the specified Spark pool for the session; this overrides 'driverCores' and 'driverMemory' of the notebook you provide. Type: string (or Expression with resultType string).
*/
@JvmName("ogperdbtkppflshb")
public suspend fun driverSize(`value`: Any?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.driverSize = mapped
}
/**
* @param value Number of cores and amount of memory to be used for the executors allocated in the specified Spark pool for the session; this overrides 'executorCores' and 'executorMemory' of the notebook you provide. Type: string (or Expression with resultType string).
*/
@JvmName("ifjuewyxvjddndkc")
public suspend fun executorSize(`value`: Any?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.executorSize = mapped
}
/**
* @param value Linked service reference.
*/
@JvmName("uyioxflolcwmdpes")
public suspend fun linkedServiceName(`value`: LinkedServiceReferenceArgs?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.linkedServiceName = mapped
}
/**
* @param argument Linked service reference.
*/
@JvmName("ofhchqckwbkxsvfw")
public suspend fun linkedServiceName(argument: suspend LinkedServiceReferenceArgsBuilder.() -> Unit) {
val toBeMapped = LinkedServiceReferenceArgsBuilder().applySuspend { argument() }.build()
val mapped = of(toBeMapped)
this.linkedServiceName = mapped
}
/**
* @param value Activity name.
*/
@JvmName("vgerswfejrfpussm")
public suspend fun name(`value`: String) {
val toBeMapped = value
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.name = mapped
}
/**
* @param value Synapse notebook reference.
*/
@JvmName("dqnnvncbccrdnvea")
public suspend fun notebook(`value`: SynapseNotebookReferenceArgs) {
val toBeMapped = value
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.notebook = mapped
}
/**
* @param argument Synapse notebook reference.
*/
@JvmName("joknljpaoantwmqr")
public suspend fun notebook(argument: suspend SynapseNotebookReferenceArgsBuilder.() -> Unit) {
val toBeMapped = SynapseNotebookReferenceArgsBuilder().applySuspend { argument() }.build()
val mapped = of(toBeMapped)
this.notebook = mapped
}
/**
* @param value Number of executors to launch for this session, which will override the 'numExecutors' of the notebook you provide. Type: integer (or Expression with resultType integer).
*/
@JvmName("uedloohavqtruxux")
public suspend fun numExecutors(`value`: Any?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.numExecutors = mapped
}
/**
* @param value Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default.
*/
@JvmName("xxkxjmnmtpwiicba")
public suspend fun onInactiveMarkAs(`value`: Either<String, ActivityOnInactiveMarkAs>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.onInactiveMarkAs = mapped
}
/**
* @param value Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default.
*/
@JvmName("hjxptihoswdteyni")
public fun onInactiveMarkAs(`value`: String) {
val toBeMapped = Either.ofLeft(value)
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.onInactiveMarkAs = mapped
}
/**
* @param value Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default.
*/
@JvmName("yiivctcbabxfkibu")
public fun onInactiveMarkAs(`value`: ActivityOnInactiveMarkAs) {
val toBeMapped = Either.ofRight(value)
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.onInactiveMarkAs = mapped
}
/**
* @param value Notebook parameters.
*/
@JvmName("cgxmrbkbvhwdulmc")
public suspend fun parameters(`value`: Map<String, NotebookParameterArgs>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.parameters = mapped
}
/**
* @param argument Notebook parameters.
*/
@JvmName("dgknbdavsvifhbqa")
public suspend fun parameters(vararg argument: Pair<String, suspend NotebookParameterArgsBuilder.() -> Unit>) {
val toBeMapped = argument.toList().map { (left, right) ->
left to
NotebookParameterArgsBuilder().applySuspend { right() }.build()
}.toMap()
val mapped = of(toBeMapped)
this.parameters = mapped
}
/**
* @param values Notebook parameters.
*/
@JvmName("syxqldloeqnvyyug")
public fun parameters(vararg values: Pair<String, NotebookParameterArgs>) {
val toBeMapped = values.toMap()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.parameters = mapped
}
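// Map-valued fields follow the analogous convention: an Output-wrapped map, a
// plain nullable map, vararg key-to-sub-builder pairs, and vararg key-to-value
// pairs all populate the same parameters field.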
/**
* @param value Activity policy.
*/
@JvmName("cmlvfcbdmrpsvkxn")
public suspend fun policy(`value`: ActivityPolicyArgs?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.policy = mapped
}
/**
* @param argument Activity policy.
*/
@JvmName("dwntgcdpovvbymyy")
public suspend fun policy(argument: suspend ActivityPolicyArgsBuilder.() -> Unit) {
val toBeMapped = ActivityPolicyArgsBuilder().applySuspend { argument() }.build()
val mapped = of(toBeMapped)
this.policy = mapped
}
/**
* @param value Spark configuration property.
*/
@JvmName("jixaylmxjoywhlev")
public suspend fun sparkConfig(`value`: Map<String, Any>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.sparkConfig = mapped
}
/**
* @param values Spark configuration property.
*/
@JvmName("iqgavungnxaftftp")
public fun sparkConfig(vararg values: Pair<String, Any>) {
val toBeMapped = values.toMap()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.sparkConfig = mapped
}
/**
* @param value The name of the big data pool which will be used to execute the notebook.
*/
@JvmName("ykkhvjcqdiitwyaj")
public suspend fun sparkPool(`value`: BigDataPoolParametrizationReferenceArgs?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.sparkPool = mapped
}
/**
* @param argument The name of the big data pool which will be used to execute the notebook.
*/
@JvmName("yhnrdkvcaynmbddp")
public suspend fun sparkPool(argument: suspend BigDataPoolParametrizationReferenceArgsBuilder.() -> Unit) {
val toBeMapped = BigDataPoolParametrizationReferenceArgsBuilder().applySuspend {
argument()
}.build()
val mapped = of(toBeMapped)
this.sparkPool = mapped
}
/**
* @param value Activity state. This is an optional property and if not provided, the state will be Active by default.
*/
@JvmName("kjbplvqaxvvgcirw")
public suspend fun state(`value`: Either<String, ActivityState>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.state = mapped
}
/**
* @param value Activity state. This is an optional property and if not provided, the state will be Active by default.
*/
@JvmName("pvirskgisvxcrirr")
public fun state(`value`: String) {
val toBeMapped = Either.ofLeft(value)
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.state = mapped
}
/**
* @param value Activity state. This is an optional property and if not provided, the state will be Active by default.
*/
@JvmName("hsqvkniqyjjqtemx")
public fun state(`value`: ActivityState) {
val toBeMapped = Either.ofRight(value)
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.state = mapped
}
/**
* @param value The spark configuration of the spark job.
*/
@JvmName("ktjmxrelwqgkqoce")
public suspend fun targetSparkConfiguration(`value`: SparkConfigurationParametrizationReferenceArgs?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.targetSparkConfiguration = mapped
}
/**
* @param argument The spark configuration of the spark job.
*/
@JvmName("ftccuoodpsdxjsjn")
public suspend fun targetSparkConfiguration(argument: suspend SparkConfigurationParametrizationReferenceArgsBuilder.() -> Unit) {
val toBeMapped = SparkConfigurationParametrizationReferenceArgsBuilder().applySuspend {
argument()
}.build()
val mapped = of(toBeMapped)
this.targetSparkConfiguration = mapped
}
/**
* @param value Type of activity.
* Expected value is 'SynapseNotebook'.
*/
@JvmName("wjfxnvhcvsyrfpbv")
public suspend fun type(`value`: String) {
val toBeMapped = value
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.type = mapped
}
/**
* @param value Activity user properties.
*/
@JvmName("qmbtlnplkgkveuba")
public suspend fun userProperties(`value`: List<UserPropertyArgs>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.userProperties = mapped
}
/**
* @param argument Activity user properties.
*/
@JvmName("mpoyexddexcitpxf")
public suspend fun userProperties(argument: List<suspend UserPropertyArgsBuilder.() -> Unit>) {
val toBeMapped = argument.toList().map {
UserPropertyArgsBuilder().applySuspend { it() }.build()
}
val mapped = of(toBeMapped)
this.userProperties = mapped
}
/**
* @param argument Activity user properties.
*/
@JvmName("osymwikbfmnevxkv")
public suspend fun userProperties(vararg argument: suspend UserPropertyArgsBuilder.() -> Unit) {
val toBeMapped = argument.toList().map {
UserPropertyArgsBuilder().applySuspend { it() }.build()
}
val mapped = of(toBeMapped)
this.userProperties = mapped
}
/**
* @param argument Activity user properties.
*/
@JvmName("wcdfbltifgunvcox")
public suspend fun userProperties(argument: suspend UserPropertyArgsBuilder.() -> Unit) {
val toBeMapped = listOf(UserPropertyArgsBuilder().applySuspend { argument() }.build())
val mapped = of(toBeMapped)
this.userProperties = mapped
}
/**
* @param values Activity user properties.
*/
@JvmName("gbfxwtqlwcdflrih")
public suspend fun userProperties(vararg values: UserPropertyArgs) {
val toBeMapped = values.toList()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.userProperties = mapped
}
internal fun build(): SynapseNotebookActivityArgs = SynapseNotebookActivityArgs(
conf = conf,
configurationType = configurationType,
dependsOn = dependsOn,
description = description,
driverSize = driverSize,
executorSize = executorSize,
linkedServiceName = linkedServiceName,
name = name ?: throw PulumiNullFieldException("name"),
notebook = notebook ?: throw PulumiNullFieldException("notebook"),
numExecutors = numExecutors,
onInactiveMarkAs = onInactiveMarkAs,
parameters = parameters,
policy = policy,
sparkConfig = sparkConfig,
sparkPool = sparkPool,
state = state,
targetSparkConfiguration = targetSparkConfiguration,
type = type ?: throw PulumiNullFieldException("type"),
userProperties = userProperties,
)
}
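// A small sketch, not part of the generated file, illustrating why Either-typed
// fields get paired setters: the String overload stores Either.ofLeft and the
// enum overload stores Either.ofRight, so the two calls below are equivalent
// ways to set the same backing field. The enum member name is assumed from the
// service's documented values for ConfigurationType.
private fun exampleConfigurationType(builder: SynapseNotebookActivityArgsBuilder) {
    builder.configurationType("Artifact") // raw string, wrapped as Either.ofLeft
    builder.configurationType(ConfigurationType.Artifact) // typed, Either.ofRight
}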