com.pulumi.gcp.dataproc.kotlin.inputs.WorkflowTemplateJobSparkSqlJobArgs.kt

Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.

@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.gcp.dataproc.kotlin.inputs

import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobSparkSqlJobArgs.builder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName

/**
 *
 * @property jarFileUris HCFS URIs of jar files to be added to the Spark CLASSPATH.
 * @property loggingConfig The runtime log config for job execution.
 * @property properties A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
 * @property queryFileUri The HCFS URI of the script that contains SQL queries.
 * @property queryList A list of queries.
 * @property scriptVariables Mapping of query variable names to values (equivalent to the Spark SQL command: SET `name="value";`).
 */
public data class WorkflowTemplateJobSparkSqlJobArgs(
    public val jarFileUris: Output<List<String>>? = null,
    public val loggingConfig: Output<WorkflowTemplateJobSparkSqlJobLoggingConfigArgs>? = null,
    public val properties: Output<Map<String, String>>? = null,
    public val queryFileUri: Output<String>? = null,
    public val queryList: Output<WorkflowTemplateJobSparkSqlJobQueryListArgs>? = null,
    public val scriptVariables: Output<Map<String, String>>? = null,
) : ConvertibleToJava<com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobSparkSqlJobArgs> {
    override fun toJava(): com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobSparkSqlJobArgs =
        com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobSparkSqlJobArgs.builder()
            .jarFileUris(jarFileUris?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .loggingConfig(loggingConfig?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .properties(
                properties?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            )
            .queryFileUri(queryFileUri?.applyValue({ args0 -> args0 }))
            .queryList(queryList?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .scriptVariables(
                scriptVariables?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.key.to(args0.value)
                    }).toMap()
                }),
            ).build()
}
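
// A minimal, hypothetical construction sketch (not part of the generated file): it wraps plain
// values in Output.of and uses made-up bucket paths purely for illustration. Property names and
// types follow the KDoc on the data class above.
//
//     val sparkSqlJob = WorkflowTemplateJobSparkSqlJobArgs(
//         queryFileUri = Output.of("gs://my-bucket/queries/report.sql"),    // hypothetical HCFS URI
//         jarFileUris = Output.of(listOf("gs://my-bucket/jars/udfs.jar")),  // extra CLASSPATH jars
//         properties = Output.of(mapOf("spark.executor.memory" to "4g")),   // SparkConf overrides
//         scriptVariables = Output.of(mapOf("run_date" to "2024-01-01")),   // SET run_date="2024-01-01";
//     )
//     // sparkSqlJob.toJava() produces the underlying com.pulumi.gcp.dataproc.inputs value.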

/**
 * Builder for [WorkflowTemplateJobSparkSqlJobArgs].
 */
@PulumiTagMarker
public class WorkflowTemplateJobSparkSqlJobArgsBuilder internal constructor() {
    private var jarFileUris: Output<List<String>>? = null

    private var loggingConfig: Output<WorkflowTemplateJobSparkSqlJobLoggingConfigArgs>? = null

    private var properties: Output<Map<String, String>>? = null

    private var queryFileUri: Output<String>? = null

    private var queryList: Output<WorkflowTemplateJobSparkSqlJobQueryListArgs>? = null

    private var scriptVariables: Output<Map<String, String>>? = null

    /**
     * @param value HCFS URIs of jar files to be added to the Spark CLASSPATH.
     */
    @JvmName("cliekkipwtplxddf")
    public suspend fun jarFileUris(`value`: Output<List<String>>) {
        this.jarFileUris = value
    }

    @JvmName("thwgclpsingeytfy")
    public suspend fun jarFileUris(vararg values: Output) {
        this.jarFileUris = Output.all(values.asList())
    }

    /**
     * @param values HCFS URIs of jar files to be added to the Spark CLASSPATH.
     */
    @JvmName("scgkruhvvvvgoyug")
    public suspend fun jarFileUris(values: List<Output<String>>) {
        this.jarFileUris = Output.all(values)
    }

    /**
     * @param value The runtime log config for job execution.
     */
    @JvmName("fvbkffhwltcfqtsy")
    public suspend fun loggingConfig(`value`: Output<WorkflowTemplateJobSparkSqlJobLoggingConfigArgs>) {
        this.loggingConfig = value
    }

    /**
     * @param value A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
     */
    @JvmName("bmfysdjxwjelomad")
    public suspend fun properties(`value`: Output<Map<String, String>>) {
        this.properties = value
    }

    /**
     * @param value The HCFS URI of the script that contains SQL queries.
     */
    @JvmName("djtmgwphxwnykwgl")
    public suspend fun queryFileUri(`value`: Output<String>) {
        this.queryFileUri = value
    }

    /**
     * @param value A list of queries.
     */
    @JvmName("iqitfaqyrsdhdltb")
    public suspend fun queryList(`value`: Output<WorkflowTemplateJobSparkSqlJobQueryListArgs>) {
        this.queryList = value
    }

    /**
     * @param value Mapping of query variable names to values (equivalent to the Spark SQL command: SET `name="value";`).
     */
    @JvmName("tymdvtcuudqqyxgo")
    public suspend fun scriptVariables(`value`: Output<Map<String, String>>) {
        this.scriptVariables = value
    }

    /**
     * @param value HCFS URIs of jar files to be added to the Spark CLASSPATH.
     */
    @JvmName("jyijkunvrbftfsxj")
    public suspend fun jarFileUris(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.jarFileUris = mapped
    }

    /**
     * @param values HCFS URIs of jar files to be added to the Spark CLASSPATH.
     */
    @JvmName("hcuaiiylapepoqlg")
    public suspend fun jarFileUris(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.jarFileUris = mapped
    }

    /**
     * @param value The runtime log config for job execution.
     */
    @JvmName("wbgliwuvdvifqsjv")
    public suspend fun loggingConfig(`value`: WorkflowTemplateJobSparkSqlJobLoggingConfigArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.loggingConfig = mapped
    }

    /**
     * @param argument The runtime log config for job execution.
     */
    @JvmName("njxvmngigsrodxcx")
    public suspend fun loggingConfig(argument: suspend WorkflowTemplateJobSparkSqlJobLoggingConfigArgsBuilder.() -> Unit) {
        val toBeMapped = WorkflowTemplateJobSparkSqlJobLoggingConfigArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.loggingConfig = mapped
    }

    /**
     * @param value A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
     */
    @JvmName("scnohgbvtnrljgvs")
    public suspend fun properties(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.properties = mapped
    }

    /**
     * @param values A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
     */
    @JvmName("goveumhsngtyvjoy")
    public fun properties(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.properties = mapped
    }

    /**
     * @param value The HCFS URI of the script that contains SQL queries.
     */
    @JvmName("japopoaxuigjrvbi")
    public suspend fun queryFileUri(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.queryFileUri = mapped
    }

    /**
     * @param value A list of queries.
     */
    @JvmName("jxbpkeklfijkpegx")
    public suspend fun queryList(`value`: WorkflowTemplateJobSparkSqlJobQueryListArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.queryList = mapped
    }

    /**
     * @param argument A list of queries.
     */
    @JvmName("ddwfegrxgbmbmaoq")
    public suspend fun queryList(argument: suspend WorkflowTemplateJobSparkSqlJobQueryListArgsBuilder.() -> Unit) {
        val toBeMapped = WorkflowTemplateJobSparkSqlJobQueryListArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.queryList = mapped
    }

    /**
     * @param value Mapping of query variable names to values (equivalent to the Spark SQL command: SET `name="value";`).
     */
    @JvmName("mvhluksqjrolugtr")
    public suspend fun scriptVariables(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.scriptVariables = mapped
    }

    /**
     * @param values Mapping of query variable names to values (equivalent to the Spark SQL command: SET `name="value";`).
     */
    @JvmName("kipwqxelwsxuksyq")
    public fun scriptVariables(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.scriptVariables = mapped
    }

    internal fun build(): WorkflowTemplateJobSparkSqlJobArgs = WorkflowTemplateJobSparkSqlJobArgs(
        jarFileUris = jarFileUris,
        loggingConfig = loggingConfig,
        properties = properties,
        queryFileUri = queryFileUri,
        queryList = queryList,
        scriptVariables = scriptVariables,
    )
}
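
// A hedged usage sketch of the builder DSL above (illustrative, not part of the generated file).
// Because the constructor and build() are internal, this only compiles from inside the SDK module;
// application code normally reaches this builder through the enclosing workflow-template job DSL.
// Paths and values are hypothetical.
//
//     suspend fun exampleSparkSqlJob(): WorkflowTemplateJobSparkSqlJobArgs =
//         WorkflowTemplateJobSparkSqlJobArgsBuilder().applySuspend {
//             queryFileUri("gs://my-bucket/queries/report.sql")    // plain-String overload
//             jarFileUris("gs://my-bucket/jars/udfs.jar")          // vararg String overload
//             properties("spark.sql.shuffle.partitions" to "64")   // vararg Pair overload
//             scriptVariables("run_date" to "2024-01-01")          // SET run_date="2024-01-01";
//         }.build()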



