// All Downloads are FREE. Search and download functionalities are using the official Maven repository.
//
// com.pulumi.gcp.dataproc.kotlin.inputs.JobSparkConfigArgs.kt — Maven / Gradle / Ivy

@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.gcp.dataproc.kotlin.inputs

import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.dataproc.inputs.JobSparkConfigArgs.builder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName

/**
 * Spark job configuration for a Google Cloud Dataproc job.
 *
 * @property archiveUris HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
 * @property args The arguments to pass to the driver.
 * @property fileUris HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
 * @property jarFileUris HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
 * @property loggingConfig The runtime logging config of the job
 * @property mainClass The class containing the main method of the driver. Must be in a
 * provided jar or jar that is already on the classpath. Conflicts with `main_jar_file_uri`
 * @property mainJarFileUri The HCFS URI of jar file containing
 * the driver jar. Conflicts with `main_class`
 * @property properties A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in `/etc/spark/conf/spark-defaults.conf` and classes in user code.
 * * `logging_config.driver_log_levels`- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
 */
public data class JobSparkConfigArgs(
    public val archiveUris: Output<List<String>>? = null,
    public val args: Output<List<String>>? = null,
    public val fileUris: Output<List<String>>? = null,
    public val jarFileUris: Output<List<String>>? = null,
    public val loggingConfig: Output<JobSparkConfigLoggingConfigArgs>? = null,
    public val mainClass: Output<String>? = null,
    public val mainJarFileUri: Output<String>? = null,
    public val properties: Output<Map<String, String>>? = null,
) : ConvertibleToJava<com.pulumi.gcp.dataproc.inputs.JobSparkConfigArgs> {
    /** Converts this Kotlin wrapper to the corresponding Java Pulumi args type. */
    override fun toJava(): com.pulumi.gcp.dataproc.inputs.JobSparkConfigArgs =
        com.pulumi.gcp.dataproc.inputs.JobSparkConfigArgs.builder()
            .archiveUris(archiveUris?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .args(args?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .fileUris(fileUris?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .jarFileUris(jarFileUris?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .loggingConfig(loggingConfig?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .mainClass(mainClass?.applyValue({ args0 -> args0 }))
            .mainJarFileUri(mainJarFileUri?.applyValue({ args0 -> args0 }))
            .properties(
                properties?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            ).build()
}

/**
 * Builder for [JobSparkConfigArgs].
 */
@PulumiTagMarker
public class JobSparkConfigArgsBuilder internal constructor() {
    private var archiveUris: Output<List<String>>? = null

    private var args: Output<List<String>>? = null

    private var fileUris: Output<List<String>>? = null

    private var jarFileUris: Output<List<String>>? = null

    private var loggingConfig: Output<JobSparkConfigLoggingConfigArgs>? = null

    private var mainClass: Output<String>? = null

    private var mainJarFileUri: Output<String>? = null

    private var properties: Output<Map<String, String>>? = null

    /**
     * @param value HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
     */
    @JvmName("ppvfpecahoqwkmwn")
    public suspend fun archiveUris(`value`: Output<List<String>>) {
        this.archiveUris = value
    }

    /**
     * @param values HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
     */
    @JvmName("htousyvxgjudxyhj")
    public suspend fun archiveUris(vararg values: Output<String>) {
        this.archiveUris = Output.all(values.asList())
    }

    /**
     * @param values HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
     */
    @JvmName("vlamgebqldexwgbv")
    public suspend fun archiveUris(values: List<Output<String>>) {
        this.archiveUris = Output.all(values)
    }

    /**
     * @param value The arguments to pass to the driver.
     */
    @JvmName("nerdbgewmfwpgvmp")
    public suspend fun args(`value`: Output<List<String>>) {
        this.args = value
    }

    /**
     * @param values The arguments to pass to the driver.
     */
    @JvmName("ouitfqbjbhykwlfa")
    public suspend fun args(vararg values: Output<String>) {
        this.args = Output.all(values.asList())
    }

    /**
     * @param values The arguments to pass to the driver.
     */
    @JvmName("tpqvexltimfdllyq")
    public suspend fun args(values: List<Output<String>>) {
        this.args = Output.all(values)
    }

    /**
     * @param value HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
     */
    @JvmName("brnjoleisshixphy")
    public suspend fun fileUris(`value`: Output<List<String>>) {
        this.fileUris = value
    }

    /**
     * @param values HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
     */
    @JvmName("evhjvpmdhgdnwtxn")
    public suspend fun fileUris(vararg values: Output<String>) {
        this.fileUris = Output.all(values.asList())
    }

    /**
     * @param values HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
     */
    @JvmName("rndbqyrruifnebsx")
    public suspend fun fileUris(values: List<Output<String>>) {
        this.fileUris = Output.all(values)
    }

    /**
     * @param value HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
     */
    @JvmName("tybaaiksjsbsorwq")
    public suspend fun jarFileUris(`value`: Output<List<String>>) {
        this.jarFileUris = value
    }

    /**
     * @param values HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
     */
    @JvmName("gyypdkyqqveljfmo")
    public suspend fun jarFileUris(vararg values: Output<String>) {
        this.jarFileUris = Output.all(values.asList())
    }

    /**
     * @param values HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
     */
    @JvmName("adglpnokjpuvuesk")
    public suspend fun jarFileUris(values: List<Output<String>>) {
        this.jarFileUris = Output.all(values)
    }

    /**
     * @param value The runtime logging config of the job
     */
    @JvmName("miqwqubvhxvyxrmn")
    public suspend fun loggingConfig(`value`: Output<JobSparkConfigLoggingConfigArgs>) {
        this.loggingConfig = value
    }

    /**
     * @param value The class containing the main method of the driver. Must be in a
     * provided jar or jar that is already on the classpath. Conflicts with `main_jar_file_uri`
     */
    @JvmName("rqfdxtuxpselswuk")
    public suspend fun mainClass(`value`: Output<String>) {
        this.mainClass = value
    }

    /**
     * @param value The HCFS URI of jar file containing
     * the driver jar. Conflicts with `main_class`
     */
    @JvmName("otahihgiysqxpaxx")
    public suspend fun mainJarFileUri(`value`: Output<String>) {
        this.mainJarFileUri = value
    }

    /**
     * @param value A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in `/etc/spark/conf/spark-defaults.conf` and classes in user code.
     * * `logging_config.driver_log_levels`- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
     */
    @JvmName("lhrpjuwyvcnrpxyp")
    public suspend fun properties(`value`: Output<Map<String, String>>) {
        this.properties = value
    }

    /**
     * @param value HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
     */
    @JvmName("lcvgvmssdeumjlmc")
    public suspend fun archiveUris(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.archiveUris = mapped
    }

    /**
     * @param values HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
     */
    @JvmName("aeflrpicsxjrpbfm")
    public suspend fun archiveUris(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.archiveUris = mapped
    }

    /**
     * @param value The arguments to pass to the driver.
     */
    @JvmName("utpxsqljwaganwsg")
    public suspend fun args(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.args = mapped
    }

    /**
     * @param values The arguments to pass to the driver.
     */
    @JvmName("pwwkxjlmbjtsdwye")
    public suspend fun args(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.args = mapped
    }

    /**
     * @param value HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
     */
    @JvmName("ltvdqmqaaaghydud")
    public suspend fun fileUris(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.fileUris = mapped
    }

    /**
     * @param values HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
     */
    @JvmName("qrokdthwkdsrqtaq")
    public suspend fun fileUris(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.fileUris = mapped
    }

    /**
     * @param value HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
     */
    @JvmName("aupvaukftcpidbpa")
    public suspend fun jarFileUris(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.jarFileUris = mapped
    }

    /**
     * @param values HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
     */
    @JvmName("elmopgnfpwxxvcki")
    public suspend fun jarFileUris(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.jarFileUris = mapped
    }

    /**
     * @param value The runtime logging config of the job
     */
    @JvmName("jbuppgjdnpgdhbun")
    public suspend fun loggingConfig(`value`: JobSparkConfigLoggingConfigArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.loggingConfig = mapped
    }

    /**
     * @param argument The runtime logging config of the job
     */
    @JvmName("vxftccxvryhjugwa")
    public suspend fun loggingConfig(argument: suspend JobSparkConfigLoggingConfigArgsBuilder.() -> Unit) {
        val toBeMapped = JobSparkConfigLoggingConfigArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.loggingConfig = mapped
    }

    /**
     * @param value The class containing the main method of the driver. Must be in a
     * provided jar or jar that is already on the classpath. Conflicts with `main_jar_file_uri`
     */
    @JvmName("fmafxvffdnwekblq")
    public suspend fun mainClass(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.mainClass = mapped
    }

    /**
     * @param value The HCFS URI of jar file containing
     * the driver jar. Conflicts with `main_class`
     */
    @JvmName("soscraekmtgfqyul")
    public suspend fun mainJarFileUri(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.mainJarFileUri = mapped
    }

    /**
     * @param value A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in `/etc/spark/conf/spark-defaults.conf` and classes in user code.
     * * `logging_config.driver_log_levels`- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
     */
    @JvmName("kktshmybrrgqdrdw")
    public suspend fun properties(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.properties = mapped
    }

    /**
     * @param values A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in `/etc/spark/conf/spark-defaults.conf` and classes in user code.
     * * `logging_config.driver_log_levels`- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
     */
    @JvmName("khiwonjixathycki")
    public fun properties(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.properties = mapped
    }

    /** Assembles the immutable [JobSparkConfigArgs] from the values set on this builder. */
    internal fun build(): JobSparkConfigArgs = JobSparkConfigArgs(
        archiveUris = archiveUris,
        args = args,
        fileUris = fileUris,
        jarFileUris = jarFileUris,
        loggingConfig = loggingConfig,
        mainClass = mainClass,
        mainJarFileUri = mainJarFileUri,
        properties = properties,
    )
}




// © 2015 - 2024 Weber Informatics LLC | Privacy Policy