// com.pulumi.azurenative.datafactory.kotlin.inputs.AzureDatabricksLinkedServiceArgs.kt (Maven / Gradle / Ivy)
// NOTE: non-source banner text from the download page removed; it was not valid Kotlin.
@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.azurenative.datafactory.kotlin.inputs
import com.pulumi.azurenative.datafactory.inputs.AzureDatabricksLinkedServiceArgs.builder
import com.pulumi.core.Either
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiNullFieldException
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Any
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName
/**
* Azure Databricks linked service.
* @property accessToken Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string).
* @property annotations List of tags that can be used for describing the linked service.
* @property authentication Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string).
* @property connectVia The integration runtime reference.
* @property credential The credential reference containing authentication information.
* @property description Linked service description.
* @property domain .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string).
* @property encryptedCredential The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string.
* @property existingClusterId The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string).
* @property instancePoolId The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression with resultType string).
* @property newClusterCustomTags Additional tags for cluster resources. This property is ignored in instance pool configurations.
* @property newClusterDriverNodeType The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: string (or Expression with resultType string).
* @property newClusterEnableElasticDisk Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean).
* @property newClusterInitScripts User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings).
* @property newClusterLogDestination Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType string).
* @property newClusterNodeType The node type of the new job cluster. This property is required if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string).
* @property newClusterNumOfWorker If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string).
* @property newClusterSparkConf A set of optional, user-specified Spark configuration key-value pairs.
* @property newClusterSparkEnvVars A set of optional, user-specified Spark environment variables key-value pairs.
* @property newClusterVersion If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string).
* @property parameters Parameters for linked service.
* @property policyId The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string).
* @property type Type of linked service.
* Expected value is 'AzureDatabricks'.
* @property version Version of the linked service.
* @property workspaceResourceId Workspace resource id for databricks REST API. Type: string (or Expression with resultType string).
*/
public data class AzureDatabricksLinkedServiceArgs(
    public val accessToken: Output<Either<AzureKeyVaultSecretReferenceArgs, SecureStringArgs>>? =
        null,
    public val annotations: Output<List<Any>>? = null,
    public val authentication: Output<Any>? = null,
    public val connectVia: Output<IntegrationRuntimeReferenceArgs>? = null,
    public val credential: Output<CredentialReferenceArgs>? = null,
    public val description: Output<String>? = null,
    public val domain: Output<Any>,
    public val encryptedCredential: Output<String>? = null,
    public val existingClusterId: Output<Any>? = null,
    public val instancePoolId: Output<Any>? = null,
    public val newClusterCustomTags: Output<Map<String, Any>>? = null,
    public val newClusterDriverNodeType: Output<Any>? = null,
    public val newClusterEnableElasticDisk: Output<Any>? = null,
    public val newClusterInitScripts: Output<Any>? = null,
    public val newClusterLogDestination: Output<Any>? = null,
    public val newClusterNodeType: Output<Any>? = null,
    public val newClusterNumOfWorker: Output<Any>? = null,
    public val newClusterSparkConf: Output<Map<String, Any>>? = null,
    public val newClusterSparkEnvVars: Output<Map<String, Any>>? = null,
    public val newClusterVersion: Output<Any>? = null,
    public val parameters: Output<Map<String, ParameterSpecificationArgs>>? = null,
    public val policyId: Output<Any>? = null,
    public val type: Output<String>,
    public val version: Output<String>? = null,
    public val workspaceResourceId: Output<Any>? = null,
) : ConvertibleToJava<com.pulumi.azurenative.datafactory.inputs.AzureDatabricksLinkedServiceArgs> {
    /**
     * Converts this Kotlin-idiomatic args object into the underlying Java SDK
     * builder representation, mapping each nested Kotlin args type via its own
     * `toJava()` and unwrapping `Either` / `Map` values element by element.
     */
    override fun toJava(): com.pulumi.azurenative.datafactory.inputs.AzureDatabricksLinkedServiceArgs =
        com.pulumi.azurenative.datafactory.inputs.AzureDatabricksLinkedServiceArgs.builder()
            .accessToken(
                accessToken?.applyValue({ args0 ->
                    // Either<AzureKeyVaultSecretReferenceArgs, SecureStringArgs>: convert
                    // whichever side is present to its Java counterpart.
                    args0.transform({ args0 ->
                        args0.let({ args0 ->
                            args0.toJava()
                        })
                    }, { args0 -> args0.let({ args0 -> args0.toJava() }) })
                }),
            )
            .annotations(annotations?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .authentication(authentication?.applyValue({ args0 -> args0 }))
            .connectVia(connectVia?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .credential(credential?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .description(description?.applyValue({ args0 -> args0 }))
            .domain(domain.applyValue({ args0 -> args0 }))
            .encryptedCredential(encryptedCredential?.applyValue({ args0 -> args0 }))
            .existingClusterId(existingClusterId?.applyValue({ args0 -> args0 }))
            .instancePoolId(instancePoolId?.applyValue({ args0 -> args0 }))
            .newClusterCustomTags(
                newClusterCustomTags?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            )
            .newClusterDriverNodeType(newClusterDriverNodeType?.applyValue({ args0 -> args0 }))
            .newClusterEnableElasticDisk(newClusterEnableElasticDisk?.applyValue({ args0 -> args0 }))
            .newClusterInitScripts(newClusterInitScripts?.applyValue({ args0 -> args0 }))
            .newClusterLogDestination(newClusterLogDestination?.applyValue({ args0 -> args0 }))
            .newClusterNodeType(newClusterNodeType?.applyValue({ args0 -> args0 }))
            .newClusterNumOfWorker(newClusterNumOfWorker?.applyValue({ args0 -> args0 }))
            .newClusterSparkConf(
                newClusterSparkConf?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            )
            .newClusterSparkEnvVars(
                newClusterSparkEnvVars?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            )
            .newClusterVersion(newClusterVersion?.applyValue({ args0 -> args0 }))
            .parameters(
                parameters?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value.let({ args0 -> args0.toJava() }))
                    }).toMap()
                }),
            )
            .policyId(policyId?.applyValue({ args0 -> args0 }))
            .type(type.applyValue({ args0 -> args0 }))
            .version(version?.applyValue({ args0 -> args0 }))
            .workspaceResourceId(workspaceResourceId?.applyValue({ args0 -> args0 })).build()
}
/**
 * Builder for [AzureDatabricksLinkedServiceArgs].
 */
@PulumiTagMarker
public class AzureDatabricksLinkedServiceArgsBuilder internal constructor() {
    private var accessToken: Output<Either<AzureKeyVaultSecretReferenceArgs, SecureStringArgs>>? =
        null

    private var annotations: Output<List<Any>>? = null

    private var authentication: Output<Any>? = null

    private var connectVia: Output<IntegrationRuntimeReferenceArgs>? = null

    private var credential: Output<CredentialReferenceArgs>? = null

    private var description: Output<String>? = null

    private var domain: Output<Any>? = null

    private var encryptedCredential: Output<String>? = null

    private var existingClusterId: Output<Any>? = null

    private var instancePoolId: Output<Any>? = null

    private var newClusterCustomTags: Output<Map<String, Any>>? = null

    private var newClusterDriverNodeType: Output<Any>? = null

    private var newClusterEnableElasticDisk: Output<Any>? = null

    private var newClusterInitScripts: Output<Any>? = null

    private var newClusterLogDestination: Output<Any>? = null

    private var newClusterNodeType: Output<Any>? = null

    private var newClusterNumOfWorker: Output<Any>? = null

    private var newClusterSparkConf: Output<Map<String, Any>>? = null

    private var newClusterSparkEnvVars: Output<Map<String, Any>>? = null

    private var newClusterVersion: Output<Any>? = null

    private var parameters: Output<Map<String, ParameterSpecificationArgs>>? = null

    private var policyId: Output<Any>? = null

    private var type: Output<String>? = null

    private var version: Output<String>? = null

    private var workspaceResourceId: Output<Any>? = null

    /**
     * @param value Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string).
     */
    @JvmName("iendnrhuyybsbnid")
    public suspend fun accessToken(`value`: Output<Either<AzureKeyVaultSecretReferenceArgs, SecureStringArgs>>) {
        this.accessToken = value
    }

    /**
     * @param value List of tags that can be used for describing the linked service.
     */
    @JvmName("dtoxlrirycvgslyy")
    public suspend fun annotations(`value`: Output<List<Any>>) {
        this.annotations = value
    }

    @JvmName("gmqgbpgdhdixqaak")
    public suspend fun annotations(vararg values: Output<Any>) {
        this.annotations = Output.all(values.asList())
    }

    /**
     * @param values List of tags that can be used for describing the linked service.
     */
    @JvmName("veitfanemieuvssu")
    public suspend fun annotations(values: List<Output<Any>>) {
        this.annotations = Output.all(values)
    }

    /**
     * @param value Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string).
     */
    @JvmName("ugjroshosmwpvcdq")
    public suspend fun authentication(`value`: Output<Any>) {
        this.authentication = value
    }

    /**
     * @param value The integration runtime reference.
     */
    @JvmName("brffemdyapgwrnpa")
    public suspend fun connectVia(`value`: Output<IntegrationRuntimeReferenceArgs>) {
        this.connectVia = value
    }

    /**
     * @param value The credential reference containing authentication information.
     */
    @JvmName("ircoueawgotpndug")
    public suspend fun credential(`value`: Output<CredentialReferenceArgs>) {
        this.credential = value
    }

    /**
     * @param value Linked service description.
     */
    @JvmName("qdqhltqfyujpmyub")
    public suspend fun description(`value`: Output<String>) {
        this.description = value
    }

    /**
     * @param value .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string).
     */
    @JvmName("narlvoexufaxokck")
    public suspend fun domain(`value`: Output<Any>) {
        this.domain = value
    }

    /**
     * @param value The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string.
     */
    @JvmName("plcagafcnijpdwer")
    public suspend fun encryptedCredential(`value`: Output<String>) {
        this.encryptedCredential = value
    }

    /**
     * @param value The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string).
     */
    @JvmName("vcujjdljwnhlmluv")
    public suspend fun existingClusterId(`value`: Output<Any>) {
        this.existingClusterId = value
    }

    /**
     * @param value The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression with resultType string).
     */
    @JvmName("qdniricxmwxsybkw")
    public suspend fun instancePoolId(`value`: Output<Any>) {
        this.instancePoolId = value
    }

    /**
     * @param value Additional tags for cluster resources. This property is ignored in instance pool configurations.
     */
    @JvmName("ckqfdwsusplooteb")
    public suspend fun newClusterCustomTags(`value`: Output<Map<String, Any>>) {
        this.newClusterCustomTags = value
    }

    /**
     * @param value The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: string (or Expression with resultType string).
     */
    @JvmName("nagchvfmttwgnyjp")
    public suspend fun newClusterDriverNodeType(`value`: Output<Any>) {
        this.newClusterDriverNodeType = value
    }

    /**
     * @param value Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean).
     */
    @JvmName("israqbelvoepiilc")
    public suspend fun newClusterEnableElasticDisk(`value`: Output<Any>) {
        this.newClusterEnableElasticDisk = value
    }

    /**
     * @param value User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings).
     */
    @JvmName("jbdpiiltcsldgwpm")
    public suspend fun newClusterInitScripts(`value`: Output<Any>) {
        this.newClusterInitScripts = value
    }

    /**
     * @param value Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType string).
     */
    @JvmName("eqvlqdkaoscutsnt")
    public suspend fun newClusterLogDestination(`value`: Output<Any>) {
        this.newClusterLogDestination = value
    }

    /**
     * @param value The node type of the new job cluster. This property is required if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string).
     */
    @JvmName("nopsukwmoajwdayd")
    public suspend fun newClusterNodeType(`value`: Output<Any>) {
        this.newClusterNodeType = value
    }

    /**
     * @param value If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string).
     */
    @JvmName("vhtxpbvneoblnxiw")
    public suspend fun newClusterNumOfWorker(`value`: Output<Any>) {
        this.newClusterNumOfWorker = value
    }

    /**
     * @param value A set of optional, user-specified Spark configuration key-value pairs.
     */
    @JvmName("ojbxsewgsvwsxxnq")
    public suspend fun newClusterSparkConf(`value`: Output<Map<String, Any>>) {
        this.newClusterSparkConf = value
    }

    /**
     * @param value A set of optional, user-specified Spark environment variables key-value pairs.
     */
    @JvmName("wrouunnxwuyaoibj")
    public suspend fun newClusterSparkEnvVars(`value`: Output<Map<String, Any>>) {
        this.newClusterSparkEnvVars = value
    }

    /**
     * @param value If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string).
     */
    @JvmName("jqsktxcbvqopmpcq")
    public suspend fun newClusterVersion(`value`: Output<Any>) {
        this.newClusterVersion = value
    }

    /**
     * @param value Parameters for linked service.
     */
    @JvmName("yhajhpgklyigrqix")
    public suspend fun parameters(`value`: Output<Map<String, ParameterSpecificationArgs>>) {
        this.parameters = value
    }

    /**
     * @param value The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string).
     */
    @JvmName("uvthfeakrwindybc")
    public suspend fun policyId(`value`: Output<Any>) {
        this.policyId = value
    }

    /**
     * @param value Type of linked service.
     * Expected value is 'AzureDatabricks'.
     */
    @JvmName("kxxemehcuqsrmgai")
    public suspend fun type(`value`: Output<String>) {
        this.type = value
    }

    /**
     * @param value Version of the linked service.
     */
    @JvmName("jubwiqjnycwecvcu")
    public suspend fun version(`value`: Output<String>) {
        this.version = value
    }

    /**
     * @param value Workspace resource id for databricks REST API. Type: string (or Expression with resultType string).
     */
    @JvmName("yxswdrpojfuabcuq")
    public suspend fun workspaceResourceId(`value`: Output<Any>) {
        this.workspaceResourceId = value
    }

    /**
     * @param value Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string).
     */
    @JvmName("wkxkahikjnjjskgp")
    public suspend fun accessToken(`value`: Either<AzureKeyVaultSecretReferenceArgs, SecureStringArgs>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.accessToken = mapped
    }

    /**
     * @param value Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string).
     */
    @JvmName("ayrctyloauhwrbcg")
    public fun accessToken(`value`: AzureKeyVaultSecretReferenceArgs) {
        val toBeMapped = Either.ofLeft<AzureKeyVaultSecretReferenceArgs, SecureStringArgs>(value)
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.accessToken = mapped
    }

    /**
     * @param value Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string).
     */
    @JvmName("nhitluornlbmscso")
    public fun accessToken(`value`: SecureStringArgs) {
        val toBeMapped = Either.ofRight<AzureKeyVaultSecretReferenceArgs, SecureStringArgs>(value)
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.accessToken = mapped
    }

    /**
     * @param value List of tags that can be used for describing the linked service.
     */
    @JvmName("qrplchpaxdkcdreq")
    public suspend fun annotations(`value`: List<Any>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.annotations = mapped
    }

    /**
     * @param values List of tags that can be used for describing the linked service.
     */
    @JvmName("kkifptnxlafhdlgi")
    public suspend fun annotations(vararg values: Any) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.annotations = mapped
    }

    /**
     * @param value Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string).
     */
    @JvmName("errelcccgssunhxa")
    public suspend fun authentication(`value`: Any?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.authentication = mapped
    }

    /**
     * @param value The integration runtime reference.
     */
    @JvmName("xejaqwvdxwkplsas")
    public suspend fun connectVia(`value`: IntegrationRuntimeReferenceArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.connectVia = mapped
    }

    /**
     * @param argument The integration runtime reference.
     */
    @JvmName("qyottqtibuqicitu")
    public suspend fun connectVia(argument: suspend IntegrationRuntimeReferenceArgsBuilder.() -> Unit) {
        val toBeMapped = IntegrationRuntimeReferenceArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.connectVia = mapped
    }

    /**
     * @param value The credential reference containing authentication information.
     */
    @JvmName("livdbmjjqeftoxqb")
    public suspend fun credential(`value`: CredentialReferenceArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.credential = mapped
    }

    /**
     * @param argument The credential reference containing authentication information.
     */
    @JvmName("tbpejaxocjjuokqx")
    public suspend fun credential(argument: suspend CredentialReferenceArgsBuilder.() -> Unit) {
        val toBeMapped = CredentialReferenceArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.credential = mapped
    }

    /**
     * @param value Linked service description.
     */
    @JvmName("ebbpsoynedoccifg")
    public suspend fun description(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.description = mapped
    }

    /**
     * @param value .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string).
     */
    @JvmName("gurroegettdqtowk")
    public suspend fun domain(`value`: Any) {
        val toBeMapped = value
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.domain = mapped
    }

    /**
     * @param value The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string.
     */
    @JvmName("emgpxunukirtnehi")
    public suspend fun encryptedCredential(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.encryptedCredential = mapped
    }

    /**
     * @param value The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string).
     */
    @JvmName("raudumojdcdhcrsx")
    public suspend fun existingClusterId(`value`: Any?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.existingClusterId = mapped
    }

    /**
     * @param value The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression with resultType string).
     */
    @JvmName("wrhwqtqiviuyrygl")
    public suspend fun instancePoolId(`value`: Any?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.instancePoolId = mapped
    }

    /**
     * @param value Additional tags for cluster resources. This property is ignored in instance pool configurations.
     */
    @JvmName("kotpjtnhlkihvpac")
    public suspend fun newClusterCustomTags(`value`: Map<String, Any>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.newClusterCustomTags = mapped
    }

    /**
     * @param values Additional tags for cluster resources. This property is ignored in instance pool configurations.
     */
    @JvmName("abnupnefedgcpkka")
    public fun newClusterCustomTags(vararg values: Pair<String, Any>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.newClusterCustomTags = mapped
    }

    /**
     * @param value The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: string (or Expression with resultType string).
     */
    @JvmName("nfhyujiqothwscak")
    public suspend fun newClusterDriverNodeType(`value`: Any?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.newClusterDriverNodeType = mapped
    }

    /**
     * @param value Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean).
     */
    @JvmName("jkpfkakmcbcgjcjp")
    public suspend fun newClusterEnableElasticDisk(`value`: Any?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.newClusterEnableElasticDisk = mapped
    }

    /**
     * @param value User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings).
     */
    @JvmName("hkrrouwwobsvybsi")
    public suspend fun newClusterInitScripts(`value`: Any?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.newClusterInitScripts = mapped
    }

    /**
     * @param value Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType string).
     */
    @JvmName("skcwuqmwuisaqscw")
    public suspend fun newClusterLogDestination(`value`: Any?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.newClusterLogDestination = mapped
    }

    /**
     * @param value The node type of the new job cluster. This property is required if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string).
     */
    @JvmName("oxnicvqynqqoagye")
    public suspend fun newClusterNodeType(`value`: Any?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.newClusterNodeType = mapped
    }

    /**
     * @param value If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string).
     */
    @JvmName("osnhslwklcpmbgxd")
    public suspend fun newClusterNumOfWorker(`value`: Any?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.newClusterNumOfWorker = mapped
    }

    /**
     * @param value A set of optional, user-specified Spark configuration key-value pairs.
     */
    @JvmName("tjdirllwyapbhpco")
    public suspend fun newClusterSparkConf(`value`: Map<String, Any>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.newClusterSparkConf = mapped
    }

    /**
     * @param values A set of optional, user-specified Spark configuration key-value pairs.
     */
    @JvmName("kuunhuxkpwunorvx")
    public fun newClusterSparkConf(vararg values: Pair<String, Any>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.newClusterSparkConf = mapped
    }

    /**
     * @param value A set of optional, user-specified Spark environment variables key-value pairs.
     */
    @JvmName("nyfodkuscvohfqas")
    public suspend fun newClusterSparkEnvVars(`value`: Map<String, Any>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.newClusterSparkEnvVars = mapped
    }

    /**
     * @param values A set of optional, user-specified Spark environment variables key-value pairs.
     */
    @JvmName("rddmttcoqqnmamju")
    public fun newClusterSparkEnvVars(vararg values: Pair<String, Any>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.newClusterSparkEnvVars = mapped
    }

    /**
     * @param value If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string).
     */
    @JvmName("nabevjdfkdamvhed")
    public suspend fun newClusterVersion(`value`: Any?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.newClusterVersion = mapped
    }

    /**
     * @param value Parameters for linked service.
     */
    @JvmName("yrggbqmkrbcbiwrf")
    public suspend fun parameters(`value`: Map<String, ParameterSpecificationArgs>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.parameters = mapped
    }

    /**
     * @param argument Parameters for linked service.
     */
    @JvmName("frnutyidffcfocng")
    public suspend fun parameters(vararg argument: Pair<String, suspend ParameterSpecificationArgsBuilder.() -> Unit>) {
        val toBeMapped = argument.toList().map { (left, right) ->
            left to
                ParameterSpecificationArgsBuilder().applySuspend { right() }.build()
        }.toMap()
        val mapped = of(toBeMapped)
        this.parameters = mapped
    }

    /**
     * @param values Parameters for linked service.
     */
    @JvmName("cbifcsrkcvdewdlh")
    public fun parameters(vararg values: Pair<String, ParameterSpecificationArgs>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.parameters = mapped
    }

    /**
     * @param value The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string).
     */
    @JvmName("ghajfotvrghytaot")
    public suspend fun policyId(`value`: Any?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.policyId = mapped
    }

    /**
     * @param value Type of linked service.
     * Expected value is 'AzureDatabricks'.
     */
    @JvmName("sgkcmfabkdjwwvoa")
    public suspend fun type(`value`: String) {
        val toBeMapped = value
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.type = mapped
    }

    /**
     * @param value Version of the linked service.
     */
    @JvmName("etpjuhlyujjjqvsn")
    public suspend fun version(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.version = mapped
    }

    /**
     * @param value Workspace resource id for databricks REST API. Type: string (or Expression with resultType string).
     */
    @JvmName("ywnstsbmkvwqbkby")
    public suspend fun workspaceResourceId(`value`: Any?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.workspaceResourceId = mapped
    }

    /**
     * Builds an [AzureDatabricksLinkedServiceArgs] from the collected values.
     *
     * @throws PulumiNullFieldException if a required field (`domain`, `type`) was never set.
     */
    internal fun build(): AzureDatabricksLinkedServiceArgs = AzureDatabricksLinkedServiceArgs(
        accessToken = accessToken,
        annotations = annotations,
        authentication = authentication,
        connectVia = connectVia,
        credential = credential,
        description = description,
        domain = domain ?: throw PulumiNullFieldException("domain"),
        encryptedCredential = encryptedCredential,
        existingClusterId = existingClusterId,
        instancePoolId = instancePoolId,
        newClusterCustomTags = newClusterCustomTags,
        newClusterDriverNodeType = newClusterDriverNodeType,
        newClusterEnableElasticDisk = newClusterEnableElasticDisk,
        newClusterInitScripts = newClusterInitScripts,
        newClusterLogDestination = newClusterLogDestination,
        newClusterNodeType = newClusterNodeType,
        newClusterNumOfWorker = newClusterNumOfWorker,
        newClusterSparkConf = newClusterSparkConf,
        newClusterSparkEnvVars = newClusterSparkEnvVars,
        newClusterVersion = newClusterVersion,
        parameters = parameters,
        policyId = policyId,
        type = type ?: throw PulumiNullFieldException("type"),
        version = version,
        workspaceResourceId = workspaceResourceId,
    )
}