com.pulumi.gcp.bigquery.kotlin.inputs.RoutineSparkOptionsArgs.kt
Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.
@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.gcp.bigquery.kotlin.inputs
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.bigquery.inputs.RoutineSparkOptionsArgs.builder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName
/**
*
* @property archiveUris Archive files to be extracted into the working directory of each executor. For more information about Apache Spark, see Apache Spark.
* @property connection Fully qualified name of the user-provided Spark connection object.
* Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}"
* @property containerImage Custom container image for the runtime environment.
* @property fileUris Files to be placed in the working directory of each executor. For more information about Apache Spark, see Apache Spark.
* @property jarUris JARs to include on the driver and executor CLASSPATH. For more information about Apache Spark, see Apache Spark.
* @property mainClass The fully qualified name of a class in jarUris, for example, com.example.wordcount.
* Exactly one of mainClass and main_jar_uri field should be set for Java/Scala language type.
* @property mainFileUri The main file/jar URI of the Spark application.
* Exactly one of the definitionBody field and the mainFileUri field must be set for Python.
* Exactly one of mainClass and mainFileUri field should be set for Java/Scala language type.
* @property properties Configuration properties as a set of key/value pairs, which will be passed on to the Spark application.
* For more information, see Apache Spark and the procedure option list.
* An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
* @property pyFileUris Python files to be placed on the PYTHONPATH for PySpark application. Supported file types: .py, .egg, and .zip. For more information about Apache Spark, see Apache Spark.
* @property runtimeVersion Runtime version. If not specified, the default runtime version is used.
*/
public data class RoutineSparkOptionsArgs(
public val archiveUris: Output<List<String>>? = null,
public val connection: Output<String>? = null,
public val containerImage: Output<String>? = null,
public val fileUris: Output<List<String>>? = null,
public val jarUris: Output<List<String>>? = null,
public val mainClass: Output<String>? = null,
public val mainFileUri: Output<String>? = null,
public val properties: Output<Map<String, String>>? = null,
public val pyFileUris: Output<List<String>>? = null,
public val runtimeVersion: Output<String>? = null,
) : ConvertibleToJava<com.pulumi.gcp.bigquery.inputs.RoutineSparkOptionsArgs> {
override fun toJava(): com.pulumi.gcp.bigquery.inputs.RoutineSparkOptionsArgs =
com.pulumi.gcp.bigquery.inputs.RoutineSparkOptionsArgs.builder()
.archiveUris(archiveUris?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
.connection(connection?.applyValue({ args0 -> args0 }))
.containerImage(containerImage?.applyValue({ args0 -> args0 }))
.fileUris(fileUris?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
.jarUris(jarUris?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
.mainClass(mainClass?.applyValue({ args0 -> args0 }))
.mainFileUri(mainFileUri?.applyValue({ args0 -> args0 }))
.properties(
properties?.applyValue({ args0 ->
args0.map({ args0 ->
args0.key.to(args0.value)
}).toMap()
}),
)
.pyFileUris(pyFileUris?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
.runtimeVersion(runtimeVersion?.applyValue({ args0 -> args0 })).build()
}
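/**
 * Illustrative sketch (not part of the generated file): the args can also be constructed
 * directly, wrapping each field in an [Output]; `toJava()` then yields the underlying Java
 * args type consumed by the provider. The helper name and all literal values below are
 * placeholders for illustration only.
 */
private fun exampleSparkOptions(): com.pulumi.gcp.bigquery.inputs.RoutineSparkOptionsArgs =
    RoutineSparkOptionsArgs(
        connection = Output.of("projects/my-project/locations/US/connections/my-spark-conn"),
        mainFileUri = Output.of("gs://my-bucket/jobs/wordcount.py"),
        runtimeVersion = Output.of("2.1"),
        pyFileUris = Output.of(listOf("gs://my-bucket/libs/helpers.py")),
        properties = Output.of(mapOf("spark.executor.instances" to "2")),
    ).toJava()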
/**
* Builder for [RoutineSparkOptionsArgs].
*/
@PulumiTagMarker
public class RoutineSparkOptionsArgsBuilder internal constructor() {
private var archiveUris: Output<List<String>>? = null
private var connection: Output<String>? = null
private var containerImage: Output<String>? = null
private var fileUris: Output<List<String>>? = null
private var jarUris: Output<List<String>>? = null
private var mainClass: Output<String>? = null
private var mainFileUri: Output<String>? = null
private var properties: Output<Map<String, String>>? = null
private var pyFileUris: Output<List<String>>? = null
private var runtimeVersion: Output<String>? = null
/**
* @param value Archive files to be extracted into the working directory of each executor. For more information about Apache Spark, see Apache Spark.
*/
@JvmName("tewnuopahjffljco")
public suspend fun archiveUris(`value`: Output<List<String>>) {
this.archiveUris = value
}
@JvmName("kabkgnmotcaajbix")
public suspend fun archiveUris(vararg values: Output<String>) {
this.archiveUris = Output.all(values.asList())
}
/**
* @param values Archive files to be extracted into the working directory of each executor. For more information about Apache Spark, see Apache Spark.
*/
@JvmName("wmfsjfjxqloyvtcq")
public suspend fun archiveUris(values: List<Output<String>>) {
this.archiveUris = Output.all(values)
}
/**
* @param value Fully qualified name of the user-provided Spark connection object.
* Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}"
*/
@JvmName("hooagydxrdsfnsga")
public suspend fun connection(`value`: Output<String>) {
this.connection = value
}
/**
* @param value Custom container image for the runtime environment.
*/
@JvmName("nnvhodwnudfbqpml")
public suspend fun containerImage(`value`: Output<String>) {
this.containerImage = value
}
/**
* @param value Files to be placed in the working directory of each executor. For more information about Apache Spark, see Apache Spark.
*/
@JvmName("ighvwxtbfjrlpdnl")
public suspend fun fileUris(`value`: Output<List<String>>) {
this.fileUris = value
}
@JvmName("xicyqwauvbirvmmy")
public suspend fun fileUris(vararg values: Output<String>) {
this.fileUris = Output.all(values.asList())
}
/**
* @param values Files to be placed in the working directory of each executor. For more information about Apache Spark, see Apache Spark.
*/
@JvmName("ndyotgpgbgyqhowf")
public suspend fun fileUris(values: List<Output<String>>) {
this.fileUris = Output.all(values)
}
/**
* @param value JARs to include on the driver and executor CLASSPATH. For more information about Apache Spark, see Apache Spark.
*/
@JvmName("glbjcqdyxtrolmmg")
public suspend fun jarUris(`value`: Output<List<String>>) {
this.jarUris = value
}
@JvmName("bwoqjkfbmpqoiieb")
public suspend fun jarUris(vararg values: Output<String>) {
this.jarUris = Output.all(values.asList())
}
/**
* @param values JARs to include on the driver and executor CLASSPATH. For more information about Apache Spark, see Apache Spark.
*/
@JvmName("fxegpcustsbrfgpg")
public suspend fun jarUris(values: List<Output<String>>) {
this.jarUris = Output.all(values)
}
/**
* @param value The fully qualified name of a class in jarUris, for example, com.example.wordcount.
* Exactly one of mainClass and main_jar_uri field should be set for Java/Scala language type.
*/
@JvmName("lfjwpfawkmbbadrg")
public suspend fun mainClass(`value`: Output<String>) {
this.mainClass = value
}
/**
* @param value The main file/jar URI of the Spark application.
* Exactly one of the definitionBody field and the mainFileUri field must be set for Python.
* Exactly one of mainClass and mainFileUri field should be set for Java/Scala language type.
*/
@JvmName("mhfbyhdlfhgaoypp")
public suspend fun mainFileUri(`value`: Output<String>) {
this.mainFileUri = value
}
/**
* @param value Configuration properties as a set of key/value pairs, which will be passed on to the Spark application.
* For more information, see Apache Spark and the procedure option list.
* An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
*/
@JvmName("xfkuphgujdceaqvu")
public suspend fun properties(`value`: Output<Map<String, String>>) {
this.properties = value
}
/**
* @param value Python files to be placed on the PYTHONPATH for PySpark application. Supported file types: .py, .egg, and .zip. For more information about Apache Spark, see Apache Spark.
*/
@JvmName("vvqmraculxevroam")
public suspend fun pyFileUris(`value`: Output<List<String>>) {
this.pyFileUris = value
}
@JvmName("hrghqjgnuaevvtqn")
public suspend fun pyFileUris(vararg values: Output<String>) {
this.pyFileUris = Output.all(values.asList())
}
/**
* @param values Python files to be placed on the PYTHONPATH for PySpark application. Supported file types: .py, .egg, and .zip. For more information about Apache Spark, see Apache Spark.
*/
@JvmName("xotavfclhnrudpbp")
public suspend fun pyFileUris(values: List<Output<String>>) {
this.pyFileUris = Output.all(values)
}
/**
* @param value Runtime version. If not specified, the default runtime version is used.
*/
@JvmName("ameeorltawpirlgb")
public suspend fun runtimeVersion(`value`: Output<String>) {
this.runtimeVersion = value
}
/**
* @param value Archive files to be extracted into the working directory of each executor. For more information about Apache Spark, see Apache Spark.
*/
@JvmName("dgsirplbfhoigmgg")
public suspend fun archiveUris(`value`: List<String>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.archiveUris = mapped
}
/**
* @param values Archive files to be extracted into the working directory of each executor. For more information about Apache Spark, see Apache Spark.
*/
@JvmName("vnbudqsjahiloqol")
public suspend fun archiveUris(vararg values: String) {
val toBeMapped = values.toList()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.archiveUris = mapped
}
/**
* @param value Fully qualified name of the user-provided Spark connection object.
* Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}"
*/
@JvmName("vlogwmdgpgmvfjog")
public suspend fun connection(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.connection = mapped
}
/**
* @param value Custom container image for the runtime environment.
*/
@JvmName("dlmbpdlinviyouad")
public suspend fun containerImage(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.containerImage = mapped
}
/**
* @param value Files to be placed in the working directory of each executor. For more information about Apache Spark, see Apache Spark.
*/
@JvmName("eyocxbgudlrjkicc")
public suspend fun fileUris(`value`: List<String>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.fileUris = mapped
}
/**
* @param values Files to be placed in the working directory of each executor. For more information about Apache Spark, see Apache Spark.
*/
@JvmName("jljcmfojgsbbjlyq")
public suspend fun fileUris(vararg values: String) {
val toBeMapped = values.toList()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.fileUris = mapped
}
/**
* @param value JARs to include on the driver and executor CLASSPATH. For more information about Apache Spark, see Apache Spark.
*/
@JvmName("aykwktxvtowahlaa")
public suspend fun jarUris(`value`: List<String>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.jarUris = mapped
}
/**
* @param values JARs to include on the driver and executor CLASSPATH. For more information about Apache Spark, see Apache Spark.
*/
@JvmName("ypxwrgcjrhbijnsd")
public suspend fun jarUris(vararg values: String) {
val toBeMapped = values.toList()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.jarUris = mapped
}
/**
* @param value The fully qualified name of a class in jarUris, for example, com.example.wordcount.
* Exactly one of mainClass and main_jar_uri field should be set for Java/Scala language type.
*/
@JvmName("peqvdoqaeghwbakr")
public suspend fun mainClass(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.mainClass = mapped
}
/**
* @param value The main file/jar URI of the Spark application.
* Exactly one of the definitionBody field and the mainFileUri field must be set for Python.
* Exactly one of mainClass and mainFileUri field should be set for Java/Scala language type.
*/
@JvmName("srpwacasneqqpkeu")
public suspend fun mainFileUri(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.mainFileUri = mapped
}
/**
* @param value Configuration properties as a set of key/value pairs, which will be passed on to the Spark application.
* For more information, see Apache Spark and the procedure option list.
* An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
*/
@JvmName("akyhibvmjlsofokk")
public suspend fun properties(`value`: Map<String, String>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.properties = mapped
}
/**
* @param values Configuration properties as a set of key/value pairs, which will be passed on to the Spark application.
* For more information, see Apache Spark and the procedure option list.
* An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
*/
@JvmName("ypruutgmlaodvljd")
public fun properties(vararg values: Pair<String, String>) {
val toBeMapped = values.toMap()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.properties = mapped
}
/**
* @param value Python files to be placed on the PYTHONPATH for PySpark application. Supported file types: .py, .egg, and .zip. For more information about Apache Spark, see Apache Spark.
*/
@JvmName("fkygfkiaulpcoghy")
public suspend fun pyFileUris(`value`: List<String>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.pyFileUris = mapped
}
/**
* @param values Python files to be placed on the PYTHONPATH for PySpark application. Supported file types: .py, .egg, and .zip. For more information about Apache Spark, see Apache Spark.
*/
@JvmName("ptuiogmvvhtklovg")
public suspend fun pyFileUris(vararg values: String) {
val toBeMapped = values.toList()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.pyFileUris = mapped
}
/**
* @param value Runtime version. If not specified, the default runtime version is used.
*/
@JvmName("iharnaksblmtqgpo")
public suspend fun runtimeVersion(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.runtimeVersion = mapped
}
internal fun build(): RoutineSparkOptionsArgs = RoutineSparkOptionsArgs(
archiveUris = archiveUris,
connection = connection,
containerImage = containerImage,
fileUris = fileUris,
jarUris = jarUris,
mainClass = mainClass,
mainFileUri = mainFileUri,
properties = properties,
pyFileUris = pyFileUris,
runtimeVersion = runtimeVersion,
)
}
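In a Pulumi Kotlin program this builder is not instantiated directly (its constructor and build() are internal); it is driven by the SDK's type-safe DSL. The sketch below shows how that usage typically looks, assuming the pulumi-gcp Kotlin SDK's routine resource function with its args and sparkOptions blocks; everything outside this file (Pulumi.run, routine, args, sparkOptions, and all literal values) is an assumption for illustration, not confirmed by this listing.

import com.pulumi.kotlin.Pulumi
import com.pulumi.gcp.bigquery.kotlin.routine // assumed resource DSL entry point

fun main() {
    Pulumi.run {
        routine("wordcount") {
            args {
                // Assumed nesting: the sparkOptions block is expected to be backed by
                // RoutineSparkOptionsArgsBuilder, so the suspend setters above apply here.
                sparkOptions {
                    connection("projects/my-project/locations/US/connections/my-spark-conn")
                    mainFileUri("gs://my-bucket/jobs/wordcount.py")
                    runtimeVersion("2.1")
                    properties("spark.executor.instances" to "2")
                }
                // Other Routine fields (datasetId, routineId, routineType, ...) omitted for brevity.
            }
        }
    }
}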