All Downloads are FREE. Search and download functionalities are using the official Maven repository.

com.pulumi.googlenative.dataproc.v1beta2.kotlin.Job.kt Maven / Gradle / Ivy

Go to download

Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.

The newest version!
@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.googlenative.dataproc.v1beta2.kotlin

import com.pulumi.core.Output
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.HadoopJobResponse
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.HiveJobResponse
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.JobPlacementResponse
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.JobReferenceResponse
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.JobSchedulingResponse
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.JobStatusResponse
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.PigJobResponse
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.PrestoJobResponse
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.PySparkJobResponse
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.SparkJobResponse
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.SparkRJobResponse
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.SparkSqlJobResponse
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.YarnApplicationResponse
import com.pulumi.kotlin.KotlinCustomResource
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.ResourceMapper
import com.pulumi.kotlin.options.CustomResourceOptions
import com.pulumi.kotlin.options.CustomResourceOptionsBuilder
import com.pulumi.resources.Resource
import kotlin.Boolean
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.collections.Map
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.HadoopJobResponse.Companion.toKotlin as hadoopJobResponseToKotlin
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.HiveJobResponse.Companion.toKotlin as hiveJobResponseToKotlin
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.JobPlacementResponse.Companion.toKotlin as jobPlacementResponseToKotlin
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.JobReferenceResponse.Companion.toKotlin as jobReferenceResponseToKotlin
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.JobSchedulingResponse.Companion.toKotlin as jobSchedulingResponseToKotlin
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.JobStatusResponse.Companion.toKotlin as jobStatusResponseToKotlin
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.PigJobResponse.Companion.toKotlin as pigJobResponseToKotlin
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.PrestoJobResponse.Companion.toKotlin as prestoJobResponseToKotlin
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.PySparkJobResponse.Companion.toKotlin as pySparkJobResponseToKotlin
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.SparkJobResponse.Companion.toKotlin as sparkJobResponseToKotlin
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.SparkRJobResponse.Companion.toKotlin as sparkRJobResponseToKotlin
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.SparkSqlJobResponse.Companion.toKotlin as sparkSqlJobResponseToKotlin
import com.pulumi.googlenative.dataproc.v1beta2.kotlin.outputs.YarnApplicationResponse.Companion.toKotlin as yarnApplicationResponseToKotlin

/**
 * Builder for [Job].
 */
@PulumiTagMarker
public class JobResourceBuilder internal constructor() {
    /** The unique name of the resulting resource. */
    public var name: String? = null

    /** The arguments used to populate the resource's properties. */
    public var args: JobArgs = JobArgs()

    /** Options controlling the resource's behavior. */
    public var opts: CustomResourceOptions = CustomResourceOptions()

    /**
     * @param value The _unique_ name of the resulting resource.
     */
    public fun name(`value`: String) {
        name = value
    }

    /**
     * @param block The arguments to use to populate this resource's properties.
     */
    public suspend fun args(block: suspend JobArgsBuilder.() -> Unit) {
        val argsBuilder = JobArgsBuilder()
        argsBuilder.block()
        args = argsBuilder.build()
    }

    /**
     * @param block A bag of options that control this resource's behavior.
     */
    public suspend fun opts(block: suspend CustomResourceOptionsBuilder.() -> Unit) {
        opts = com.pulumi.kotlin.options.CustomResourceOptions.opts(block)
    }

    // Materializes the underlying Java resource and wraps it in the Kotlin facade.
    internal fun build(): Job {
        val javaResource = com.pulumi.googlenative.dataproc.v1beta2.Job(
            name,
            args.toJava(),
            opts.toJava(),
        )
        return Job(javaResource)
    }
}

/**
 * Submits a job to a cluster.
 * Auto-naming is currently not supported for this resource.
 */
public class Job internal constructor(
    override val javaResource: com.pulumi.googlenative.dataproc.v1beta2.Job,
) : KotlinCustomResource(javaResource, JobMapper) {
    /**
     * Indicates whether the job is completed. If the value is false, the job is still in progress. If true, the job is completed, and status.state field will indicate if it was successful, failed, or cancelled.
     */
    public val done: Output<Boolean>
        get() = javaResource.done().applyValue({ args0 -> args0 })

    /**
     * If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
     */
    public val driverControlFilesUri: Output<String>
        get() = javaResource.driverControlFilesUri().applyValue({ args0 -> args0 })

    /**
     * A URI pointing to the location of the stdout of the job's driver program.
     */
    public val driverOutputResourceUri: Output<String>
        get() = javaResource.driverOutputResourceUri().applyValue({ args0 -> args0 })

    /**
     * Optional. Job is a Hadoop job.
     */
    public val hadoopJob: Output<HadoopJobResponse>
        get() = javaResource.hadoopJob().applyValue({ args0 ->
            args0.let({ args0 ->
                hadoopJobResponseToKotlin(args0)
            })
        })

    /**
     * Optional. Job is a Hive job.
     */
    public val hiveJob: Output<HiveJobResponse>
        get() = javaResource.hiveJob().applyValue({ args0 ->
            args0.let({ args0 ->
                hiveJobResponseToKotlin(args0)
            })
        })

    /**
     * A UUID that uniquely identifies a job within the project over time. This is in contrast to a user-settable reference.job_id that may be reused over time.
     */
    public val jobUuid: Output<String>
        get() = javaResource.jobUuid().applyValue({ args0 -> args0 })

    /**
     * Optional. The labels to associate with this job. Label keys must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated with a job.
     */
    public val labels: Output<Map<String, String>>
        get() = javaResource.labels().applyValue({ args0 ->
            args0.map({ args0 ->
                args0.key.to(args0.value)
            }).toMap()
        })

    /**
     * Optional. Job is a Pig job.
     */
    public val pigJob: Output<PigJobResponse>
        get() = javaResource.pigJob().applyValue({ args0 ->
            args0.let({ args0 ->
                pigJobResponseToKotlin(args0)
            })
        })

    /**
     * Job information, including how, when, and where to run the job.
     */
    public val placement: Output<JobPlacementResponse>
        get() = javaResource.placement().applyValue({ args0 ->
            args0.let({ args0 ->
                jobPlacementResponseToKotlin(args0)
            })
        })

    /**
     * Optional. Job is a Presto job.
     */
    public val prestoJob: Output<PrestoJobResponse>
        get() = javaResource.prestoJob().applyValue({ args0 ->
            args0.let({ args0 ->
                prestoJobResponseToKotlin(args0)
            })
        })

    public val project: Output<String>
        get() = javaResource.project().applyValue({ args0 -> args0 })

    /**
     * Optional. Job is a PySpark job.
     */
    public val pysparkJob: Output<PySparkJobResponse>
        get() = javaResource.pysparkJob().applyValue({ args0 ->
            args0.let({ args0 ->
                pySparkJobResponseToKotlin(args0)
            })
        })

    /**
     * Optional. The fully qualified reference to the job, which can be used to obtain the equivalent REST path of the job resource. If this property is not specified when a job is created, the server generates a job_id.
     */
    public val reference: Output<JobReferenceResponse>
        get() = javaResource.reference().applyValue({ args0 ->
            args0.let({ args0 ->
                jobReferenceResponseToKotlin(args0)
            })
        })

    public val region: Output<String>
        get() = javaResource.region().applyValue({ args0 -> args0 })

    /**
     * Optional. Job scheduling configuration.
     */
    public val scheduling: Output<JobSchedulingResponse>
        get() = javaResource.scheduling().applyValue({ args0 ->
            args0.let({ args0 ->
                jobSchedulingResponseToKotlin(args0)
            })
        })

    /**
     * Optional. Job is a Spark job.
     */
    public val sparkJob: Output<SparkJobResponse>
        get() = javaResource.sparkJob().applyValue({ args0 ->
            args0.let({ args0 ->
                sparkJobResponseToKotlin(args0)
            })
        })

    /**
     * Optional. Job is a SparkR job.
     */
    public val sparkRJob: Output<SparkRJobResponse>
        get() = javaResource.sparkRJob().applyValue({ args0 ->
            args0.let({ args0 ->
                sparkRJobResponseToKotlin(args0)
            })
        })

    /**
     * Optional. Job is a SparkSql job.
     */
    public val sparkSqlJob: Output<SparkSqlJobResponse>
        get() = javaResource.sparkSqlJob().applyValue({ args0 ->
            args0.let({ args0 ->
                sparkSqlJobResponseToKotlin(args0)
            })
        })

    /**
     * The job status. Additional application-specific status information may be contained in the type_job and yarn_applications fields.
     */
    public val status: Output<JobStatusResponse>
        get() = javaResource.status().applyValue({ args0 ->
            args0.let({ args0 ->
                jobStatusResponseToKotlin(args0)
            })
        })

    /**
     * The previous job status.
     */
    public val statusHistory: Output<List<JobStatusResponse>>
        get() = javaResource.statusHistory().applyValue({ args0 ->
            args0.map({ args0 ->
                args0.let({ args0 -> jobStatusResponseToKotlin(args0) })
            })
        })

    /**
     * The email address of the user submitting the job. For jobs submitted on the cluster, the address is username@hostname.
     */
    public val submittedBy: Output<String>
        get() = javaResource.submittedBy().applyValue({ args0 -> args0 })

    /**
     * The collection of YARN applications spun up by this job.Beta Feature: This report is available for testing purposes only. It may be changed before final release.
     */
    public val yarnApplications: Output<List<YarnApplicationResponse>>
        get() = javaResource.yarnApplications().applyValue({ args0 ->
            args0.map({ args0 ->
                args0.let({ args0 -> yarnApplicationResponseToKotlin(args0) })
            })
        })
}

/**
 * Maps the underlying Java [com.pulumi.googlenative.dataproc.v1beta2.Job] resource
 * onto its Kotlin wrapper [Job]. The generic argument on [ResourceMapper] was lost
 * in extraction and is restored here.
 */
public object JobMapper : ResourceMapper<Job> {
    override fun supportsMappingOfType(javaResource: Resource): Boolean =
        com.pulumi.googlenative.dataproc.v1beta2.Job::class == javaResource::class

    override fun map(javaResource: Resource): Job = Job(
        javaResource as
            com.pulumi.googlenative.dataproc.v1beta2.Job,
    )
}

/**
 * @see [Job].
 * @param name The _unique_ name of the resulting resource.
 * @param block Builder for [Job].
 */
/**
 * Creates a [Job] resource, configuring it through the [JobResourceBuilder] DSL.
 *
 * @see [Job].
 * @param name The _unique_ name of the resulting resource.
 * @param block Builder for [Job].
 */
public suspend fun job(name: String, block: suspend JobResourceBuilder.() -> Unit): Job {
    val resourceBuilder = JobResourceBuilder()
    resourceBuilder.name(name)
    resourceBuilder.block()
    return resourceBuilder.build()
}

/**
 * @see [Job].
 * @param name The _unique_ name of the resulting resource.
 */
/**
 * Creates a [Job] resource with default arguments and options.
 *
 * @see [Job].
 * @param name The _unique_ name of the resulting resource.
 */
public fun job(name: String): Job =
    JobResourceBuilder().apply { name(name) }.build()




© 2015 - 2025 Weber Informatics LLC | Privacy Policy