All downloads are free. Search and download functionality uses the official Maven repository.

com.pulumi.aws.emrcontainers.kotlin.inputs.JobTemplateJobTemplateDataJobDriverSparkSqlJobDriverArgs.kt Maven / Gradle / Ivy

Go to download

Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.

There is a newer version: 6.57.0.0
Show newest version
@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.aws.emrcontainers.kotlin.inputs

import com.pulumi.aws.emrcontainers.inputs.JobTemplateJobTemplateDataJobDriverSparkSqlJobDriverArgs.builder
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import kotlin.String
import kotlin.Suppress
import kotlin.jvm.JvmName

/**
 * Spark SQL job driver arguments for an EMR on EKS (EMR Containers) job template.
 *
 * Kotlin-side wrapper that converts to the underlying Java Pulumi args type via [toJava].
 *
 * @property entryPoint The SQL file to be executed.
 * @property sparkSqlParameters The Spark parameters to be included in the Spark SQL command.
 */
public data class JobTemplateJobTemplateDataJobDriverSparkSqlJobDriverArgs(
    public val entryPoint: Output<String>? = null,
    public val sparkSqlParameters: Output<String>? = null,
) :
    ConvertibleToJava<com.pulumi.aws.emrcontainers.inputs.JobTemplateJobTemplateDataJobDriverSparkSqlJobDriverArgs> {
    // Map each Kotlin Output<String> onto the Java builder; nulls are simply not set.
    override fun toJava(): com.pulumi.aws.emrcontainers.inputs.JobTemplateJobTemplateDataJobDriverSparkSqlJobDriverArgs =
        com.pulumi.aws.emrcontainers.inputs.JobTemplateJobTemplateDataJobDriverSparkSqlJobDriverArgs.builder()
            .entryPoint(entryPoint?.applyValue({ args0 -> args0 }))
            .sparkSqlParameters(sparkSqlParameters?.applyValue({ args0 -> args0 })).build()
}

/**
 * Builder for [JobTemplateJobTemplateDataJobDriverSparkSqlJobDriverArgs].
 *
 * Each property accepts either a raw [String] or an already-lifted [Output], matching the
 * Pulumi Kotlin DSL convention.
 */
@PulumiTagMarker
public class JobTemplateJobTemplateDataJobDriverSparkSqlJobDriverArgsBuilder internal constructor() {
    private var entryPoint: Output<String>? = null

    private var sparkSqlParameters: Output<String>? = null

    /**
     * @param value The SQL file to be executed.
     */
    @JvmName("eidoaapnstqypuns")
    public suspend fun entryPoint(`value`: Output<String>) {
        this.entryPoint = value
    }

    /**
     * @param value The Spark parameters to be included in the Spark SQL command.
     */
    @JvmName("knhblyyoirpkjywy")
    public suspend fun sparkSqlParameters(`value`: Output<String>) {
        this.sparkSqlParameters = value
    }

    /**
     * @param value The SQL file to be executed.
     */
    @JvmName("pevonjpmirmybvky")
    public suspend fun entryPoint(`value`: String?) {
        val toBeMapped = value
        // Lift the plain String into an Output; a null argument clears the field.
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.entryPoint = mapped
    }

    /**
     * @param value The Spark parameters to be included in the Spark SQL command.
     */
    @JvmName("hbipukmonbvrnvpe")
    public suspend fun sparkSqlParameters(`value`: String?) {
        val toBeMapped = value
        // Lift the plain String into an Output; a null argument clears the field.
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.sparkSqlParameters = mapped
    }

    /** Materialize the accumulated state into the immutable args data class. */
    internal fun build(): JobTemplateJobTemplateDataJobDriverSparkSqlJobDriverArgs =
        JobTemplateJobTemplateDataJobDriverSparkSqlJobDriverArgs(
            entryPoint = entryPoint,
            sparkSqlParameters = sparkSqlParameters,
        )
}




© 2015 - 2024 Weber Informatics LLC | Privacy Policy