@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.googlenative.dataproc.v1.kotlin.inputs
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.googlenative.dataproc.v1.inputs.SparkRJobArgs.builder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiNullFieldException
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName
/**
* A Dataproc job for running Apache SparkR (https://spark.apache.org/docs/latest/sparkr.html) applications on YARN.
* @property archiveUris Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
* @property args Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
* @property fileUris Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
* @property loggingConfig Optional. The runtime log config for job execution.
* @property mainRFileUri The HCFS URI of the main R file to use as the driver. Must be a .R file.
* @property properties Optional. A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
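 *
 * A minimal construction sketch (the bucket paths and Spark property values below are
 * illustrative placeholders, not values taken from this SDK):
 * ```kotlin
 * import com.pulumi.core.Output
 *
 * val sparkRJob = SparkRJobArgs(
 *     mainRFileUri = Output.of("gs://my-bucket/jobs/analysis.R"), // required driver script
 *     args = Output.of(listOf("--input", "gs://my-bucket/data.csv")),
 *     properties = Output.of(mapOf("spark.executor.memory" to "4g")),
 * )
 * ```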
*/
public data class SparkRJobArgs(
public val archiveUris: Output<List<String>>? = null,
public val args: Output<List<String>>? = null,
public val fileUris: Output<List<String>>? = null,
public val loggingConfig: Output<LoggingConfigArgs>? = null,
public val mainRFileUri: Output<String>,
public val properties: Output<Map<String, String>>? = null,
) : ConvertibleToJava<com.pulumi.googlenative.dataproc.v1.inputs.SparkRJobArgs> {
override fun toJava(): com.pulumi.googlenative.dataproc.v1.inputs.SparkRJobArgs =
com.pulumi.googlenative.dataproc.v1.inputs.SparkRJobArgs.builder()
.archiveUris(archiveUris?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
.args(args?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
.fileUris(fileUris?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
.loggingConfig(loggingConfig?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
.mainRFileUri(mainRFileUri.applyValue({ args0 -> args0 }))
.properties(
properties?.applyValue({ args0 ->
args0.map({ args0 ->
args0.key.to(args0.value)
}).toMap()
}),
).build()
}
/**
* Builder for [SparkRJobArgs].
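 *
 * A hedged usage sketch: the setters below are the suspend functions defined on this builder,
 * while the surrounding `sparkRJob { ... }` entry point is a hypothetical DSL wrapper, not
 * something defined in this file:
 * ```kotlin
 * sparkRJob {
 *     mainRFileUri("gs://my-bucket/jobs/analysis.R")
 *     args("--input", "gs://my-bucket/data.csv")
 *     fileUris("gs://my-bucket/lookup.csv")
 *     properties("spark.executor.memory" to "4g")
 * }
 * ```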
*/
@PulumiTagMarker
public class SparkRJobArgsBuilder internal constructor() {
private var archiveUris: Output<List<String>>? = null
private var args: Output<List<String>>? = null
private var fileUris: Output<List<String>>? = null
private var loggingConfig: Output<LoggingConfigArgs>? = null
private var mainRFileUri: Output<String>? = null
private var properties: Output<Map<String, String>>? = null
/**
* @param value Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
*/
@JvmName("uxaihmieuqmyhjcd")
public suspend fun archiveUris(`value`: Output<List<String>>) {
this.archiveUris = value
}
@JvmName("ewgjtcjikuxnunwo")
public suspend fun archiveUris(vararg values: Output<String>) {
this.archiveUris = Output.all(values.asList())
}
/**
* @param values Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
*/
@JvmName("numwbulpdkvwpqmu")
public suspend fun archiveUris(values: List<Output<String>>) {
this.archiveUris = Output.all(values)
}
/**
* @param value Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
*/
@JvmName("ibvyelunbelukohk")
public suspend fun args(`value`: Output<List<String>>) {
this.args = value
}
@JvmName("lrwrquasydsdrehc")
public suspend fun args(vararg values: Output<String>) {
this.args = Output.all(values.asList())
}
/**
* @param values Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
*/
@JvmName("oueagxedsedxpnsk")
public suspend fun args(values: List<Output<String>>) {
this.args = Output.all(values)
}
/**
* @param value Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
*/
@JvmName("irvmgcrisgjipeov")
public suspend fun fileUris(`value`: Output<List<String>>) {
this.fileUris = value
}
@JvmName("phymagysetboerrf")
public suspend fun fileUris(vararg values: Output<String>) {
this.fileUris = Output.all(values.asList())
}
/**
* @param values Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
*/
@JvmName("xmgqhboxqvoovyhi")
public suspend fun fileUris(values: List<Output<String>>) {
this.fileUris = Output.all(values)
}
/**
* @param value Optional. The runtime log config for job execution.
*/
@JvmName("haulcgqvfhnaqlue")
public suspend fun loggingConfig(`value`: Output<LoggingConfigArgs>) {
this.loggingConfig = value
}
/**
* @param value The HCFS URI of the main R file to use as the driver. Must be a .R file.
*/
@JvmName("hwppbacvutxuhpnf")
public suspend fun mainRFileUri(`value`: Output<String>) {
this.mainRFileUri = value
}
/**
* @param value Optional. A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
*/
@JvmName("jdtirwhlugplaxbu")
public suspend fun properties(`value`: Output<Map<String, String>>) {
this.properties = value
}
/**
* @param value Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
*/
@JvmName("gywjoexlkxlhweey")
public suspend fun archiveUris(`value`: List<String>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.archiveUris = mapped
}
/**
* @param values Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
*/
@JvmName("ywwxjufkrbphwcfi")
public suspend fun archiveUris(vararg values: String) {
val toBeMapped = values.toList()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.archiveUris = mapped
}
/**
* @param value Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
*/
@JvmName("bguwdlbikaduehyq")
public suspend fun args(`value`: List<String>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.args = mapped
}
/**
* @param values Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
*/
@JvmName("sdpklplsafekgsqs")
public suspend fun args(vararg values: String) {
val toBeMapped = values.toList()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.args = mapped
}
/**
* @param value Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
*/
@JvmName("pauwpdiylfejfieb")
public suspend fun fileUris(`value`: List<String>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.fileUris = mapped
}
/**
* @param values Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
*/
@JvmName("dhepsoxvnlbdkpek")
public suspend fun fileUris(vararg values: String) {
val toBeMapped = values.toList()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.fileUris = mapped
}
/**
* @param value Optional. The runtime log config for job execution.
*/
@JvmName("ounqvybonmeafquv")
public suspend fun loggingConfig(`value`: LoggingConfigArgs?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.loggingConfig = mapped
}
/**
* @param argument Optional. The runtime log config for job execution.
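 *
 * A sketch of the nested-builder style (the setters available inside the block come from
 * LoggingConfigArgsBuilder, which is defined outside this file):
 * ```kotlin
 * loggingConfig {
 *     // configure LoggingConfigArgs fields here, e.g. driver log levels
 * }
 * ```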
*/
@JvmName("dlhdcpmhigceeysg")
public suspend fun loggingConfig(argument: suspend LoggingConfigArgsBuilder.() -> Unit) {
val toBeMapped = LoggingConfigArgsBuilder().applySuspend { argument() }.build()
val mapped = of(toBeMapped)
this.loggingConfig = mapped
}
/**
* @param value The HCFS URI of the main R file to use as the driver. Must be a .R file.
*/
@JvmName("ncxbcdiqosnecvdi")
public suspend fun mainRFileUri(`value`: String) {
val toBeMapped = value
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.mainRFileUri = mapped
}
/**
* @param value Optional. A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
*/
@JvmName("kwknkcpdsebuernt")
public suspend fun properties(`value`: Map<String, String>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.properties = mapped
}
/**
* @param values Optional. A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
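 *
 * For example (the Spark property values are illustrative only):
 * ```kotlin
 * properties("spark.executor.memory" to "4g", "spark.logConf" to "true")
 * ```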
*/
@JvmName("fslegomtkkwoacig")
public fun properties(vararg values: Pair<String, String>) {
val toBeMapped = values.toMap()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.properties = mapped
}
internal fun build(): SparkRJobArgs = SparkRJobArgs(
archiveUris = archiveUris,
args = args,
fileUris = fileUris,
loggingConfig = loggingConfig,
mainRFileUri = mainRFileUri ?: throw PulumiNullFieldException("mainRFileUri"),
properties = properties,
)
}