
com.pulumi.gcp.dataproc.kotlin.inputs.WorkflowTemplateJobArgs.kt Maven / Gradle / Ivy


Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.

@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.gcp.dataproc.kotlin.inputs

import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobArgs.builder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiNullFieldException
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName

/**
 *
 * @property hadoopJob Job is a Hadoop job.
 * @property hiveJob Job is a Hive job.
 * @property labels The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the following regular expression: `[\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}`. Label values must be between 1 and 63 characters long, and must conform to the following regular expression: `[\p{Ll}\p{Lo}\p{N}_-]{0,63}`. No more than 32 labels can be associated with a given job.
 * @property pigJob Job is a Pig job.
 * @property prerequisiteStepIds The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of the workflow.
 * @property prestoJob Job is a Presto job.
 * @property pysparkJob Job is a PySpark job.
 * @property scheduling Job scheduling configuration.
 * @property sparkJob Job is a Spark job.
 * @property sparkRJob Job is a SparkR job.
 * @property sparkSqlJob Job is a SparkSql job.
 * @property stepId Required. The step id. The id must be unique among all jobs within the template. The step id is used as a prefix for the job id, as the job's `goog-dataproc-workflow-step-id` label, and in the `prerequisiteStepIds` field of other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). It cannot begin or end with an underscore or hyphen, and must consist of between 3 and 50 characters.
 */
public data class WorkflowTemplateJobArgs(
    public val hadoopJob: Output<WorkflowTemplateJobHadoopJobArgs>? = null,
    public val hiveJob: Output<WorkflowTemplateJobHiveJobArgs>? = null,
    public val labels: Output<Map<String, String>>? = null,
    public val pigJob: Output<WorkflowTemplateJobPigJobArgs>? = null,
    public val prerequisiteStepIds: Output<List<String>>? = null,
    public val prestoJob: Output<WorkflowTemplateJobPrestoJobArgs>? = null,
    public val pysparkJob: Output<WorkflowTemplateJobPysparkJobArgs>? = null,
    public val scheduling: Output<WorkflowTemplateJobSchedulingArgs>? = null,
    public val sparkJob: Output<WorkflowTemplateJobSparkJobArgs>? = null,
    public val sparkRJob: Output<WorkflowTemplateJobSparkRJobArgs>? = null,
    public val sparkSqlJob: Output<WorkflowTemplateJobSparkSqlJobArgs>? = null,
    public val stepId: Output<String>,
) : ConvertibleToJava<com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobArgs> {
    override fun toJava(): com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobArgs =
        com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobArgs.builder()
            .hadoopJob(hadoopJob?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .hiveJob(hiveJob?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .labels(labels?.applyValue({ args0 -> args0.map({ args0 -> args0.key.to(args0.value) }).toMap() }))
            .pigJob(pigJob?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .prerequisiteStepIds(prerequisiteStepIds?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .prestoJob(prestoJob?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .pysparkJob(pysparkJob?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .scheduling(scheduling?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .sparkJob(sparkJob?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .sparkRJob(sparkRJob?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .sparkSqlJob(sparkSqlJob?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .stepId(stepId.applyValue({ args0 -> args0 })).build()
}
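
// Illustrative sketch (not part of the generated API surface): when every
// value is known up front, the args class can be constructed directly.
// `stepId` is the only required field; the step ids and labels below are
// hypothetical, and a real step would also set exactly one job type
// (sparkJob, pigJob, etc.).
private fun exampleJobArgs(): WorkflowTemplateJobArgs =
    WorkflowTemplateJobArgs(
        // Must be 3-50 characters: letters, digits, underscores, hyphens.
        stepId = Output.of("word-count"),
        // Start this step only after the hypothetical "ingest" step completes.
        prerequisiteStepIds = Output.of(listOf("ingest")),
        labels = Output.of(mapOf("env" to "dev")),
    )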

/**
 * Builder for [WorkflowTemplateJobArgs].
 */
@PulumiTagMarker
public class WorkflowTemplateJobArgsBuilder internal constructor() {
    private var hadoopJob: Output<WorkflowTemplateJobHadoopJobArgs>? = null

    private var hiveJob: Output<WorkflowTemplateJobHiveJobArgs>? = null

    private var labels: Output<Map<String, String>>? = null

    private var pigJob: Output<WorkflowTemplateJobPigJobArgs>? = null

    private var prerequisiteStepIds: Output<List<String>>? = null

    private var prestoJob: Output<WorkflowTemplateJobPrestoJobArgs>? = null

    private var pysparkJob: Output<WorkflowTemplateJobPysparkJobArgs>? = null

    private var scheduling: Output<WorkflowTemplateJobSchedulingArgs>? = null

    private var sparkJob: Output<WorkflowTemplateJobSparkJobArgs>? = null

    private var sparkRJob: Output<WorkflowTemplateJobSparkRJobArgs>? = null

    private var sparkSqlJob: Output<WorkflowTemplateJobSparkSqlJobArgs>? = null

    private var stepId: Output<String>? = null

    /**
     * @param value Job is a Hadoop job.
     */
    @JvmName("mmpeysvifmpjatil")
    public suspend fun hadoopJob(`value`: Output<WorkflowTemplateJobHadoopJobArgs>) {
        this.hadoopJob = value
    }

    /**
     * @param value Job is a Hive job.
     */
    @JvmName("cdpcemtydhbdunqp")
    public suspend fun hiveJob(`value`: Output<WorkflowTemplateJobHiveJobArgs>) {
        this.hiveJob = value
    }

    /**
     * @param value The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the following regular expression: `[\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}`. Label values must be between 1 and 63 characters long, and must conform to the following regular expression: `[\p{Ll}\p{Lo}\p{N}_-]{0,63}`. No more than 32 labels can be associated with a given job.
     */
    @JvmName("dbdheqficbbcibiq")
    public suspend fun labels(`value`: Output<Map<String, String>>) {
        this.labels = value
    }

    /**
     * @param value Job is a Pig job.
     */
    @JvmName("tlerhsoywcnlrfta")
    public suspend fun pigJob(`value`: Output<WorkflowTemplateJobPigJobArgs>) {
        this.pigJob = value
    }

    /**
     * @param value The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of the workflow.
     */
    @JvmName("yeqlqcjjjhkiaaqh")
    public suspend fun prerequisiteStepIds(`value`: Output<List<String>>) {
        this.prerequisiteStepIds = value
    }

    @JvmName("fkjiskqfayqfdkne")
    public suspend fun prerequisiteStepIds(vararg values: Output) {
        this.prerequisiteStepIds = Output.all(values.asList())
    }

    /**
     * @param values The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of the workflow.
     */
    @JvmName("hdgsarmavrimqldn")
    public suspend fun prerequisiteStepIds(values: List<Output<String>>) {
        this.prerequisiteStepIds = Output.all(values)
    }

    /**
     * @param value Job is a Presto job.
     */
    @JvmName("jixhqovtefhiporm")
    public suspend fun prestoJob(`value`: Output<WorkflowTemplateJobPrestoJobArgs>) {
        this.prestoJob = value
    }

    /**
     * @param value Job is a PySpark job.
     */
    @JvmName("bkxchaqsxyaenbnp")
    public suspend fun pysparkJob(`value`: Output<WorkflowTemplateJobPysparkJobArgs>) {
        this.pysparkJob = value
    }

    /**
     * @param value Job scheduling configuration.
     */
    @JvmName("rjgwvhfnykpwwcxt")
    public suspend fun scheduling(`value`: Output<WorkflowTemplateJobSchedulingArgs>) {
        this.scheduling = value
    }

    /**
     * @param value Job is a Spark job.
     */
    @JvmName("qsgvehxwusfpobsh")
    public suspend fun sparkJob(`value`: Output<WorkflowTemplateJobSparkJobArgs>) {
        this.sparkJob = value
    }

    /**
     * @param value Job is a SparkR job.
     */
    @JvmName("ffqqnaslpbednahq")
    public suspend fun sparkRJob(`value`: Output<WorkflowTemplateJobSparkRJobArgs>) {
        this.sparkRJob = value
    }

    /**
     * @param value Job is a SparkSql job.
     */
    @JvmName("aaceecpaetwnevdc")
    public suspend fun sparkSqlJob(`value`: Output<WorkflowTemplateJobSparkSqlJobArgs>) {
        this.sparkSqlJob = value
    }

    /**
     * @param value Required. The step id. The id must be unique among all jobs within the template. The step id is used as a prefix for the job id, as the job's `goog-dataproc-workflow-step-id` label, and in the `prerequisiteStepIds` field of other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). It cannot begin or end with an underscore or hyphen, and must consist of between 3 and 50 characters.
     */
    @JvmName("tbvgibgaidurqtfe")
    public suspend fun stepId(`value`: Output<String>) {
        this.stepId = value
    }

    /**
     * @param value Job is a Hadoop job.
     */
    @JvmName("sbjyelyliljnfwjs")
    public suspend fun hadoopJob(`value`: WorkflowTemplateJobHadoopJobArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.hadoopJob = mapped
    }

    /**
     * @param argument Job is a Hadoop job.
     */
    @JvmName("cgyewhbyunsbocdn")
    public suspend fun hadoopJob(argument: suspend WorkflowTemplateJobHadoopJobArgsBuilder.() -> Unit) {
        val toBeMapped = WorkflowTemplateJobHadoopJobArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.hadoopJob = mapped
    }

    /**
     * @param value Job is a Hive job.
     */
    @JvmName("imsfhynwgnhdvsar")
    public suspend fun hiveJob(`value`: WorkflowTemplateJobHiveJobArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.hiveJob = mapped
    }

    /**
     * @param argument Job is a Hive job.
     */
    @JvmName("xdrboyopdvglbuuh")
    public suspend fun hiveJob(argument: suspend WorkflowTemplateJobHiveJobArgsBuilder.() -> Unit) {
        val toBeMapped = WorkflowTemplateJobHiveJobArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.hiveJob = mapped
    }

    /**
     * @param value The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the following regular expression: `[\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}`. Label values must be between 1 and 63 characters long, and must conform to the following regular expression: `[\p{Ll}\p{Lo}\p{N}_-]{0,63}`. No more than 32 labels can be associated with a given job.
     */
    @JvmName("ldmatftsmvucvfiq")
    public suspend fun labels(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.labels = mapped
    }

    /**
     * @param values The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the following regular expression: `[\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}`. Label values must be between 1 and 63 characters long, and must conform to the following regular expression: `[\p{Ll}\p{Lo}\p{N}_-]{0,63}`. No more than 32 labels can be associated with a given job.
     */
    @JvmName("eyagliplggjmkgjh")
    public fun labels(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.labels = mapped
    }

    /**
     * @param value Job is a Pig job.
     */
    @JvmName("pibtkbevdirsopns")
    public suspend fun pigJob(`value`: WorkflowTemplateJobPigJobArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.pigJob = mapped
    }

    /**
     * @param argument Job is a Pig job.
     */
    @JvmName("yciicnpocdlbbsuy")
    public suspend fun pigJob(argument: suspend WorkflowTemplateJobPigJobArgsBuilder.() -> Unit) {
        val toBeMapped = WorkflowTemplateJobPigJobArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.pigJob = mapped
    }

    /**
     * @param value The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of the workflow.
     */
    @JvmName("ylmmyxkhatwwaxfr")
    public suspend fun prerequisiteStepIds(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.prerequisiteStepIds = mapped
    }

    /**
     * @param values The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of the workflow.
     */
    @JvmName("mlrshdhooukighvl")
    public suspend fun prerequisiteStepIds(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.prerequisiteStepIds = mapped
    }

    /**
     * @param value Job is a Presto job.
     */
    @JvmName("hygiggbwsdjufnhv")
    public suspend fun prestoJob(`value`: WorkflowTemplateJobPrestoJobArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.prestoJob = mapped
    }

    /**
     * @param argument Job is a Presto job.
     */
    @JvmName("tchaoxcojswbjavp")
    public suspend fun prestoJob(argument: suspend WorkflowTemplateJobPrestoJobArgsBuilder.() -> Unit) {
        val toBeMapped = WorkflowTemplateJobPrestoJobArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.prestoJob = mapped
    }

    /**
     * @param value Job is a PySpark job.
     */
    @JvmName("vtqpwqhyfilrndye")
    public suspend fun pysparkJob(`value`: WorkflowTemplateJobPysparkJobArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.pysparkJob = mapped
    }

    /**
     * @param argument Job is a PySpark job.
     */
    @JvmName("hchkdayvheaocfdr")
    public suspend fun pysparkJob(argument: suspend WorkflowTemplateJobPysparkJobArgsBuilder.() -> Unit) {
        val toBeMapped = WorkflowTemplateJobPysparkJobArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.pysparkJob = mapped
    }

    /**
     * @param value Job scheduling configuration.
     */
    @JvmName("oktmllyhippudsbr")
    public suspend fun scheduling(`value`: WorkflowTemplateJobSchedulingArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.scheduling = mapped
    }

    /**
     * @param argument Job scheduling configuration.
     */
    @JvmName("dnqpfseylvgxrglr")
    public suspend fun scheduling(argument: suspend WorkflowTemplateJobSchedulingArgsBuilder.() -> Unit) {
        val toBeMapped = WorkflowTemplateJobSchedulingArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.scheduling = mapped
    }

    /**
     * @param value Job is a Spark job.
     */
    @JvmName("juefcscbacsffxae")
    public suspend fun sparkJob(`value`: WorkflowTemplateJobSparkJobArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.sparkJob = mapped
    }

    /**
     * @param argument Job is a Spark job.
     */
    @JvmName("ueomvqfrjvloyasc")
    public suspend fun sparkJob(argument: suspend WorkflowTemplateJobSparkJobArgsBuilder.() -> Unit) {
        val toBeMapped = WorkflowTemplateJobSparkJobArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.sparkJob = mapped
    }

    /**
     * @param value Job is a SparkR job.
     */
    @JvmName("eqqesjccdtcukklt")
    public suspend fun sparkRJob(`value`: WorkflowTemplateJobSparkRJobArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.sparkRJob = mapped
    }

    /**
     * @param argument Job is a SparkR job.
     */
    @JvmName("tluobmbvecelyeng")
    public suspend fun sparkRJob(argument: suspend WorkflowTemplateJobSparkRJobArgsBuilder.() -> Unit) {
        val toBeMapped = WorkflowTemplateJobSparkRJobArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.sparkRJob = mapped
    }

    /**
     * @param value Job is a SparkSql job.
     */
    @JvmName("cwjxpbyuglrffhdi")
    public suspend fun sparkSqlJob(`value`: WorkflowTemplateJobSparkSqlJobArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.sparkSqlJob = mapped
    }

    /**
     * @param argument Job is a SparkSql job.
     */
    @JvmName("jtywrthtlpewoatc")
    public suspend fun sparkSqlJob(argument: suspend WorkflowTemplateJobSparkSqlJobArgsBuilder.() -> Unit) {
        val toBeMapped = WorkflowTemplateJobSparkSqlJobArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.sparkSqlJob = mapped
    }

    /**
     * @param value Required. The step id. The id must be unique among all jobs within the template. The step id is used as a prefix for the job id, as the job's `goog-dataproc-workflow-step-id` label, and in the `prerequisiteStepIds` field of other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). It cannot begin or end with an underscore or hyphen, and must consist of between 3 and 50 characters.
     */
    @JvmName("nqogwmvvjehwqcwi")
    public suspend fun stepId(`value`: String) {
        val toBeMapped = value
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.stepId = mapped
    }

    internal fun build(): WorkflowTemplateJobArgs = WorkflowTemplateJobArgs(
        hadoopJob = hadoopJob,
        hiveJob = hiveJob,
        labels = labels,
        pigJob = pigJob,
        prerequisiteStepIds = prerequisiteStepIds,
        prestoJob = prestoJob,
        pysparkJob = pysparkJob,
        scheduling = scheduling,
        sparkJob = sparkJob,
        sparkRJob = sparkRJob,
        sparkSqlJob = sparkSqlJob,
        stepId = stepId ?: throw PulumiNullFieldException("stepId"),
    )
}
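
// DSL usage sketch: the builder above has an internal constructor and is
// normally instantiated for you by the enclosing resource DSL in
// com.pulumi.gcp.dataproc.kotlin, which hands it to the `jobs { ... }` lambda
// (assumed here); template, location, and step names are hypothetical, and
// the nested sparkSqlJob shape is sketched from the provider schema:
//
//     workflowTemplate("my-template") {
//         args {
//             location("us-central1")
//             jobs({
//                 stepId("word-count")
//                 prerequisiteStepIds("ingest")
//                 labels("env" to "dev")
//                 sparkSqlJob { queryList { queries("SELECT 1") } }
//             })
//         }
//     }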



