com.pulumi.gcp.dataproc.kotlin.inputs.JobSparkConfigArgs.kt
Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.
@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.gcp.dataproc.kotlin.inputs
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.dataproc.inputs.JobSparkConfigArgs.builder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName
/**
*
* @property archiveUris HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
* @property args The arguments to pass to the driver.
* @property fileUris HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
* @property jarFileUris HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
* @property loggingConfig The runtime logging config of the job
* @property mainClass The class containing the main method of the driver. Must be in a
* provided jar or jar that is already on the classpath. Conflicts with `main_jar_file_uri`
* @property mainJarFileUri The HCFS URI of the jar file containing
* the driver jar. Conflicts with `main_class`
* @property properties A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in `/etc/spark/conf/spark-defaults.conf` and classes in user code.
* * `logging_config.driver_log_levels` - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
*/
public data class JobSparkConfigArgs(
public val archiveUris: Output<List<String>>? = null,
public val args: Output<List<String>>? = null,
public val fileUris: Output<List<String>>? = null,
public val jarFileUris: Output<List<String>>? = null,
public val loggingConfig: Output<JobSparkConfigLoggingConfigArgs>? = null,
public val mainClass: Output<String>? = null,
public val mainJarFileUri: Output<String>? = null,
public val properties: Output<Map<String, String>>? = null,
) : ConvertibleToJava<com.pulumi.gcp.dataproc.inputs.JobSparkConfigArgs> {
override fun toJava(): com.pulumi.gcp.dataproc.inputs.JobSparkConfigArgs =
com.pulumi.gcp.dataproc.inputs.JobSparkConfigArgs.builder()
.archiveUris(archiveUris?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
.args(args?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
.fileUris(fileUris?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
.jarFileUris(jarFileUris?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
.loggingConfig(loggingConfig?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
.mainClass(mainClass?.applyValue({ args0 -> args0 }))
.mainJarFileUri(mainJarFileUri?.applyValue({ args0 -> args0 }))
.properties(
properties?.applyValue({ args0 ->
args0.map({ args0 ->
args0.key.to(args0.value)
}).toMap()
}),
).build()
}
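/*
 * Illustrative sketch (not part of the generated file): the data class above can be
 * constructed directly with the `Output` helpers already imported in this file. The
 * class name, URI, argument, and property values below are placeholders.
 *
 *     val sparkConfig = JobSparkConfigArgs(
 *         mainClass = Output.of("org.apache.spark.examples.SparkPi"),
 *         jarFileUris = Output.of(listOf("file:///usr/lib/spark/examples/jars/spark-examples.jar")),
 *         args = Output.of(listOf("1000")),
 *         properties = Output.of(mapOf("spark.logConf" to "true")),
 *     )
 *
 * Only `mainClass` is set here because it conflicts with `mainJarFileUri`.
 */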
/**
* Builder for [JobSparkConfigArgs].
*/
@PulumiTagMarker
public class JobSparkConfigArgsBuilder internal constructor() {
private var archiveUris: Output<List<String>>? = null
private var args: Output<List<String>>? = null
private var fileUris: Output<List<String>>? = null
private var jarFileUris: Output<List<String>>? = null
private var loggingConfig: Output<JobSparkConfigLoggingConfigArgs>? = null
private var mainClass: Output<String>? = null
private var mainJarFileUri: Output<String>? = null
private var properties: Output<Map<String, String>>? = null
/**
* @param value HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
*/
@JvmName("xlthotiqqdyxcgct")
public suspend fun archiveUris(`value`: Output<List<String>>) {
this.archiveUris = value
}
@JvmName("xsvkomxdrenevljf")
public suspend fun archiveUris(vararg values: Output<String>) {
this.archiveUris = Output.all(values.asList())
}
/**
* @param values HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
*/
@JvmName("ombwbcminlpmptku")
public suspend fun archiveUris(values: List<Output<String>>) {
this.archiveUris = Output.all(values)
}
/**
* @param value The arguments to pass to the driver.
*/
@JvmName("bvvraoehguhaukwe")
public suspend fun args(`value`: Output<List<String>>) {
this.args = value
}
@JvmName("uopxphqhsqfxfvar")
public suspend fun args(vararg values: Output<String>) {
this.args = Output.all(values.asList())
}
/**
* @param values The arguments to pass to the driver.
*/
@JvmName("ufimmxmiiustexsa")
public suspend fun args(values: List<Output<String>>) {
this.args = Output.all(values)
}
/**
* @param value HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
*/
@JvmName("ufukdyqkkmkdvldi")
public suspend fun fileUris(`value`: Output<List<String>>) {
this.fileUris = value
}
@JvmName("gvhftwbetacvtslh")
public suspend fun fileUris(vararg values: Output<String>) {
this.fileUris = Output.all(values.asList())
}
/**
* @param values HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
*/
@JvmName("jdbapiljlvlclseg")
public suspend fun fileUris(values: List<Output<String>>) {
this.fileUris = Output.all(values)
}
/**
* @param value HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
*/
@JvmName("vhdhffowftbymxbc")
public suspend fun jarFileUris(`value`: Output<List<String>>) {
this.jarFileUris = value
}
@JvmName("xxswrevvsjlqxbvv")
public suspend fun jarFileUris(vararg values: Output<String>) {
this.jarFileUris = Output.all(values.asList())
}
/**
* @param values HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
*/
@JvmName("ofajroiyfpnnjbne")
public suspend fun jarFileUris(values: List<Output<String>>) {
this.jarFileUris = Output.all(values)
}
/**
* @param value The runtime logging config of the job
*/
@JvmName("sxafciosjocjqgyi")
public suspend fun loggingConfig(`value`: Output<JobSparkConfigLoggingConfigArgs>) {
this.loggingConfig = value
}
/**
* @param value The class containing the main method of the driver. Must be in a
* provided jar or jar that is already on the classpath. Conflicts with `main_jar_file_uri`
*/
@JvmName("uejhvqnghjdymuel")
public suspend fun mainClass(`value`: Output<String>) {
this.mainClass = value
}
/**
* @param value The HCFS URI of the jar file containing
* the driver jar. Conflicts with `main_class`
*/
@JvmName("efaxcbycoortafjj")
public suspend fun mainJarFileUri(`value`: Output<String>) {
this.mainJarFileUri = value
}
/**
* @param value A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in `/etc/spark/conf/spark-defaults.conf` and classes in user code.
* * `logging_config.driver_log_levels` - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
*/
@JvmName("byigyegnsdqngbhp")
public suspend fun properties(`value`: Output<Map<String, String>>) {
this.properties = value
}
/**
* @param value HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
*/
@JvmName("jtwevxgtstyhaclh")
public suspend fun archiveUris(`value`: List<String>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.archiveUris = mapped
}
/**
* @param values HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
*/
@JvmName("arqrhijhjrwefoos")
public suspend fun archiveUris(vararg values: String) {
val toBeMapped = values.toList()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.archiveUris = mapped
}
/**
* @param value The arguments to pass to the driver.
*/
@JvmName("iikttekojtdmtclo")
public suspend fun args(`value`: List<String>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.args = mapped
}
/**
* @param values The arguments to pass to the driver.
*/
@JvmName("rbgedrsjlgqfgdns")
public suspend fun args(vararg values: String) {
val toBeMapped = values.toList()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.args = mapped
}
/**
* @param value HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
*/
@JvmName("ptdqxvlnjtvkauhv")
public suspend fun fileUris(`value`: List<String>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.fileUris = mapped
}
/**
* @param values HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
*/
@JvmName("yiavmuiuarwmfxol")
public suspend fun fileUris(vararg values: String) {
val toBeMapped = values.toList()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.fileUris = mapped
}
/**
* @param value HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
*/
@JvmName("pfjuvejxeiopoiwp")
public suspend fun jarFileUris(`value`: List<String>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.jarFileUris = mapped
}
/**
* @param values HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
*/
@JvmName("ynqdwxtrrxrdjomm")
public suspend fun jarFileUris(vararg values: String) {
val toBeMapped = values.toList()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.jarFileUris = mapped
}
/**
* @param value The runtime logging config of the job
*/
@JvmName("ticqbtntcortgwvy")
public suspend fun loggingConfig(`value`: JobSparkConfigLoggingConfigArgs?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.loggingConfig = mapped
}
/**
* @param argument The runtime logging config of the job
*/
@JvmName("sqmidrciiieylvfb")
public suspend fun loggingConfig(argument: suspend JobSparkConfigLoggingConfigArgsBuilder.() -> Unit) {
val toBeMapped = JobSparkConfigLoggingConfigArgsBuilder().applySuspend { argument() }.build()
val mapped = of(toBeMapped)
this.loggingConfig = mapped
}
/**
* @param value The class containing the main method of the driver. Must be in a
* provided jar or jar that is already on the classpath. Conflicts with `main_jar_file_uri`
*/
@JvmName("naumxoemedvakhfg")
public suspend fun mainClass(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.mainClass = mapped
}
/**
* @param value The HCFS URI of the jar file containing
* the driver jar. Conflicts with `main_class`
*/
@JvmName("dprxegfwyeksertp")
public suspend fun mainJarFileUri(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.mainJarFileUri = mapped
}
/**
* @param value A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in `/etc/spark/conf/spark-defaults.conf` and classes in user code.
* * `logging_config.driver_log_levels` - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
*/
@JvmName("benifuosrxmniper")
public suspend fun properties(`value`: Map<String, String>?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.properties = mapped
}
/**
* @param values A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in `/etc/spark/conf/spark-defaults.conf` and classes in user code.
* * `logging_config.driver_log_levels` - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
*/
@JvmName("jgffrnxergtxecfg")
public fun properties(vararg values: Pair<String, String>) {
val toBeMapped = values.toMap()
val mapped = toBeMapped.let({ args0 -> of(args0) })
this.properties = mapped
}
internal fun build(): JobSparkConfigArgs = JobSparkConfigArgs(
archiveUris = archiveUris,
args = args,
fileUris = fileUris,
jarFileUris = jarFileUris,
loggingConfig = loggingConfig,
mainClass = mainClass,
mainJarFileUri = mainJarFileUri,
properties = properties,
)
}
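/*
 * Illustrative sketch (not part of the generated file): the builder's constructor is
 * internal, so in a Pulumi Kotlin program it is normally reached through the type-safe
 * resource DSL rather than instantiated directly. Assuming the Dataproc Job resource
 * exposes a `sparkConfig { ... }` block backed by this builder, the overloads above
 * allow, for example:
 *
 *     sparkConfig {
 *         mainClass("org.apache.spark.examples.SparkPi")   // conflicts with mainJarFileUri
 *         jarFileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
 *         args("1000")
 *         properties("spark.logConf" to "true")
 *         loggingConfig {
 *             // nested JobSparkConfigLoggingConfigArgsBuilder block; assumes it exposes
 *             // a driverLogLevels(...) setter for the per-package log levels
 *             driverLogLevels(mapOf("root" to "INFO"))
 *         }
 *     }
 *
 * All values shown are placeholders, not defaults.
 */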