com.pulumi.gcp.dataproc.kotlin.outputs.WorkflowTemplateJob.kt


Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.

@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.gcp.dataproc.kotlin.outputs

import kotlin.String
import kotlin.Suppress
import kotlin.collections.List
import kotlin.collections.Map

/**
 * A single job executed as one step of a Dataproc workflow template.
 *
 * @property hadoopJob Job is a Hadoop job.
 * @property hiveJob Job is a Hive job.
 * @property labels The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the following regular expression: `{0,63}`. No more than 32 labels can be associated with a given job.
 * @property pigJob Job is a Pig job.
 * @property prerequisiteStepIds The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of workflow.
 * @property prestoJob Job is a Presto job.
 * @property pysparkJob Job is a PySpark job.
 * @property scheduling Job scheduling configuration.
 * @property sparkJob Job is a Spark job.
 * @property sparkRJob Job is a SparkR job.
 * @property sparkSqlJob Job is a SparkSql job.
 * @property stepId Required. The step id. The id must be unique among all jobs within the template. The step id is used as a prefix for the job id, as the job `goog-dataproc-workflow-step-id` label, and in the `prerequisiteStepIds` field of other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). It cannot begin or end with an underscore or hyphen and must consist of between 3 and 50 characters.
 */
public data class WorkflowTemplateJob(
    public val hadoopJob: WorkflowTemplateJobHadoopJob? = null,
    public val hiveJob: WorkflowTemplateJobHiveJob? = null,
    public val labels: Map<String, String>? = null,
    public val pigJob: WorkflowTemplateJobPigJob? = null,
    public val prerequisiteStepIds: List<String>? = null,
    public val prestoJob: WorkflowTemplateJobPrestoJob? = null,
    public val pysparkJob: WorkflowTemplateJobPysparkJob? = null,
    public val scheduling: WorkflowTemplateJobScheduling? = null,
    public val sparkJob: WorkflowTemplateJobSparkJob? = null,
    public val sparkRJob: WorkflowTemplateJobSparkRJob? = null,
    public val sparkSqlJob: WorkflowTemplateJobSparkSqlJob? = null,
    public val stepId: String,
) {
    public companion object {
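        /**
         * Converts the Java SDK output type into this Kotlin representation,
         * mapping Optional-wrapped getters onto nullable properties.
         */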
        public fun toKotlin(javaType: com.pulumi.gcp.dataproc.outputs.WorkflowTemplateJob): WorkflowTemplateJob = WorkflowTemplateJob(
            hadoopJob = javaType.hadoopJob().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.gcp.dataproc.kotlin.outputs.WorkflowTemplateJobHadoopJob.Companion.toKotlin(args0)
                })
            }).orElse(null),
            hiveJob = javaType.hiveJob().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.gcp.dataproc.kotlin.outputs.WorkflowTemplateJobHiveJob.Companion.toKotlin(args0)
                })
            }).orElse(null),
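            // labels and prerequisiteStepIds are plain collections on the Java type,
            // so they are copied into Kotlin collections rather than unwrapped from Optionals.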
            labels = javaType.labels().map({ args0 -> args0.key.to(args0.value) }).toMap(),
            pigJob = javaType.pigJob().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.gcp.dataproc.kotlin.outputs.WorkflowTemplateJobPigJob.Companion.toKotlin(args0)
                })
            }).orElse(null),
            prerequisiteStepIds = javaType.prerequisiteStepIds().map({ args0 -> args0 }),
            prestoJob = javaType.prestoJob().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.gcp.dataproc.kotlin.outputs.WorkflowTemplateJobPrestoJob.Companion.toKotlin(args0)
                })
            }).orElse(null),
            pysparkJob = javaType.pysparkJob().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.gcp.dataproc.kotlin.outputs.WorkflowTemplateJobPysparkJob.Companion.toKotlin(args0)
                })
            }).orElse(null),
            scheduling = javaType.scheduling().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.gcp.dataproc.kotlin.outputs.WorkflowTemplateJobScheduling.Companion.toKotlin(args0)
                })
            }).orElse(null),
            sparkJob = javaType.sparkJob().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.gcp.dataproc.kotlin.outputs.WorkflowTemplateJobSparkJob.Companion.toKotlin(args0)
                })
            }).orElse(null),
            sparkRJob = javaType.sparkRJob().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.gcp.dataproc.kotlin.outputs.WorkflowTemplateJobSparkRJob.Companion.toKotlin(args0)
                })
            }).orElse(null),
            sparkSqlJob = javaType.sparkSqlJob().map({ args0 ->
                args0.let({ args0 ->
                    com.pulumi.gcp.dataproc.kotlin.outputs.WorkflowTemplateJobSparkSqlJob.Companion.toKotlin(args0)
                })
            }).orElse(null),
            stepId = javaType.stepId(),
        )
    }
}
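
The class above is an output type: only `stepId` is required, every job variant is nullable, and exactly one of them is expected to be set for a given step. The snippet below is a minimal hand-written sketch, not part of the generated file, showing how such a value can be constructed and inspected in plain Kotlin; the describe helper and the example step id are illustrative assumptions.

// Illustrative sketch only; assumes the generated output types above are on the classpath.
import com.pulumi.gcp.dataproc.kotlin.outputs.WorkflowTemplateJob

// Hypothetical helper: reports which job variant a workflow step carries.
fun describe(job: WorkflowTemplateJob): String {
    val kind = when {
        job.hadoopJob != null -> "Hadoop"
        job.hiveJob != null -> "Hive"
        job.pigJob != null -> "Pig"
        job.prestoJob != null -> "Presto"
        job.pysparkJob != null -> "PySpark"
        job.sparkJob != null -> "Spark"
        job.sparkRJob != null -> "SparkR"
        job.sparkSqlJob != null -> "SparkSql"
        else -> "unknown"
    }
    return "step ${job.stepId}: $kind job, prerequisites=${job.prerequisiteStepIds ?: emptyList<String>()}"
}

fun main() {
    // stepId is the only required constructor parameter; all job fields default to null.
    val job = WorkflowTemplateJob(stepId = "prepare-data")
    println(describe(job))
}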



