
// com.pulumi.azurenative.datafactory.kotlin.outputs.SynapseNotebookActivityResponse.kt (Maven / Gradle / Ivy)
@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.azurenative.datafactory.kotlin.outputs
import kotlin.Any
import kotlin.String
import kotlin.Suppress
import kotlin.collections.List
import kotlin.collections.Map
/**
* Execute Synapse notebook activity.
* @property conf Spark configuration properties, which will override the 'conf' of the notebook you provide.
* @property configurationType The type of the spark config.
* @property dependsOn Activity depends on condition.
* @property description Activity description.
* @property driverSize Number of core and memory to be used for driver allocated in the specified Spark pool for the session, which will be used for overriding 'driverCores' and 'driverMemory' of the notebook you provide. Type: string (or Expression with resultType string).
* @property executorSize Number of core and memory to be used for executors allocated in the specified Spark pool for the session, which will be used for overriding 'executorCores' and 'executorMemory' of the notebook you provide. Type: string (or Expression with resultType string).
* @property linkedServiceName Linked service reference.
* @property name Activity name.
* @property notebook Synapse notebook reference.
* @property numExecutors Number of executors to launch for this session, which will override the 'numExecutors' of the notebook you provide. Type: integer (or Expression with resultType integer).
* @property onInactiveMarkAs Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default.
* @property parameters Notebook parameters.
* @property policy Activity policy.
* @property sparkConfig Spark configuration property.
* @property sparkPool The name of the big data pool which will be used to execute the notebook.
* @property state Activity state. This is an optional property and if not provided, the state will be Active by default.
* @property targetSparkConfiguration The spark configuration of the spark job.
* @property type Type of activity.
* Expected value is 'SynapseNotebook'.
* @property userProperties Activity user properties.
*/
public data class SynapseNotebookActivityResponse(
    public val conf: Any? = null,
    public val configurationType: String? = null,
    // NOTE(review): the generic type arguments on the collection properties below were
    // stripped (angle brackets eaten during extraction); restored from the element/value
    // conversions performed in Companion.toKotlin.
    public val dependsOn: List<ActivityDependencyResponse>? = null,
    public val description: String? = null,
    public val driverSize: Any? = null,
    public val executorSize: Any? = null,
    public val linkedServiceName: LinkedServiceReferenceResponse? = null,
    public val name: String,
    public val notebook: SynapseNotebookReferenceResponse,
    public val numExecutors: Any? = null,
    public val onInactiveMarkAs: String? = null,
    public val parameters: Map<String, NotebookParameterResponse>? = null,
    public val policy: ActivityPolicyResponse? = null,
    // Values are passed through untouched in toKotlin, so the value type is Any.
    public val sparkConfig: Map<String, Any>? = null,
    public val sparkPool: BigDataPoolParametrizationReferenceResponse? = null,
    public val state: String? = null,
    public val targetSparkConfiguration: SparkConfigurationParametrizationReferenceResponse? = null,
    public val type: String,
    public val userProperties: List<UserPropertyResponse>? = null,
) {
    public companion object {
        /**
         * Converts the Java SDK representation of this output type to its Kotlin
         * counterpart, unwrapping `Optional` fields to nullable properties and
         * recursively converting nested Pulumi output types.
         */
        public fun toKotlin(javaType: com.pulumi.azurenative.datafactory.outputs.SynapseNotebookActivityResponse): SynapseNotebookActivityResponse =
            SynapseNotebookActivityResponse(
                conf = javaType.conf().map({ args0 -> args0 }).orElse(null),
                configurationType = javaType.configurationType().map({ args0 -> args0 }).orElse(null),
                // List-valued getter: List.map converts each element; no Optional involved.
                dependsOn = javaType.dependsOn().map({ args0 ->
                    args0.let({ args0 ->
                        com.pulumi.azurenative.datafactory.kotlin.outputs.ActivityDependencyResponse.Companion.toKotlin(args0)
                    })
                }),
                description = javaType.description().map({ args0 -> args0 }).orElse(null),
                driverSize = javaType.driverSize().map({ args0 -> args0 }).orElse(null),
                executorSize = javaType.executorSize().map({ args0 -> args0 }).orElse(null),
                linkedServiceName = javaType.linkedServiceName().map({ args0 ->
                    args0.let({ args0 ->
                        com.pulumi.azurenative.datafactory.kotlin.outputs.LinkedServiceReferenceResponse.Companion.toKotlin(args0)
                    })
                }).orElse(null),
                name = javaType.name(),
                // Required field: converted directly, no Optional unwrapping.
                notebook = javaType.notebook().let({ args0 ->
                    com.pulumi.azurenative.datafactory.kotlin.outputs.SynapseNotebookReferenceResponse.Companion.toKotlin(args0)
                }),
                numExecutors = javaType.numExecutors().map({ args0 -> args0 }).orElse(null),
                onInactiveMarkAs = javaType.onInactiveMarkAs().map({ args0 -> args0 }).orElse(null),
                // Map entries are converted pairwise and reassembled with toMap().
                parameters = javaType.parameters().map({ args0 ->
                    args0.key.to(
                        args0.value.let({ args0 ->
                            com.pulumi.azurenative.datafactory.kotlin.outputs.NotebookParameterResponse.Companion.toKotlin(args0)
                        }),
                    )
                }).toMap(),
                policy = javaType.policy().map({ args0 ->
                    args0.let({ args0 ->
                        com.pulumi.azurenative.datafactory.kotlin.outputs.ActivityPolicyResponse.Companion.toKotlin(args0)
                    })
                }).orElse(null),
                sparkConfig = javaType.sparkConfig().map({ args0 -> args0.key.to(args0.value) }).toMap(),
                sparkPool = javaType.sparkPool().map({ args0 ->
                    args0.let({ args0 ->
                        com.pulumi.azurenative.datafactory.kotlin.outputs.BigDataPoolParametrizationReferenceResponse.Companion.toKotlin(args0)
                    })
                }).orElse(null),
                state = javaType.state().map({ args0 -> args0 }).orElse(null),
                targetSparkConfiguration = javaType.targetSparkConfiguration().map({ args0 ->
                    args0.let({ args0 ->
                        com.pulumi.azurenative.datafactory.kotlin.outputs.SparkConfigurationParametrizationReferenceResponse.Companion.toKotlin(args0)
                    })
                }).orElse(null),
                type = javaType.type(),
                userProperties = javaType.userProperties().map({ args0 ->
                    args0.let({ args0 ->
                        com.pulumi.azurenative.datafactory.kotlin.outputs.UserPropertyResponse.Companion.toKotlin(args0)
                    })
                }),
            )
    }
}
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy