
// src/dataproc/v1/Job.scala
package besom.api.googlenative.dataproc.v1
/** A Google Cloud Dataproc `Job` resource (`google-native:dataproc/v1:Job`) as materialized by
 * the Pulumi engine.
 *
 * Every field is wrapped in [[besom.types.Output]]: values are resolved asynchronously by the
 * Pulumi engine during deployment and must be composed with `map`/`flatMap` (or the extension
 * methods on `Output[Job]` provided by the companion object) rather than read directly.
 *
 * The constructor is `private` — instances are only created by the engine via
 * [[Job.apply]] / resource decoding, never constructed by user code.
 *
 * NOTE(review): the mutually exclusive `*Job` payload fields (flinkJob, hadoopJob, hiveJob,
 * pigJob, prestoJob, pysparkJob, sparkJob, sparkRJob, sparkSqlJob, trinoJob) presumably have
 * exactly one populated per job, matching the Dataproc API's oneof — confirm against the
 * Dataproc v1 API reference; nothing in this file enforces it.
 */
final case class Job private(
// Pulumi-level identity: the resource URN and the provider-assigned resource id.
urn: besom.types.Output[besom.types.URN],
id: besom.types.Output[besom.types.ResourceId],
done: besom.types.Output[Boolean],
driverControlFilesUri: besom.types.Output[String],
driverOutputResourceUri: besom.types.Output[String],
driverSchedulingConfig: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.DriverSchedulingConfigResponse],
// Job-type payloads — see oneof note in the class scaladoc.
flinkJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.FlinkJobResponse],
hadoopJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.HadoopJobResponse],
hiveJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.HiveJobResponse],
jobUuid: besom.types.Output[String],
labels: besom.types.Output[scala.Predef.Map[String, String]],
pigJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.PigJobResponse],
placement: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.JobPlacementResponse],
prestoJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.PrestoJobResponse],
project: besom.types.Output[String],
pysparkJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.PySparkJobResponse],
reference: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.JobReferenceResponse],
region: besom.types.Output[String],
scheduling: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.JobSchedulingResponse],
sparkJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.SparkJobResponse],
sparkRJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.SparkRJobResponse],
sparkSqlJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.SparkSqlJobResponse],
// Current status plus the full history of previous status transitions.
status: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.JobStatusResponse],
statusHistory: besom.types.Output[scala.collection.immutable.List[besom.api.googlenative.dataproc.v1.outputs.JobStatusResponse]],
trinoJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.TrinoJobResponse],
yarnApplications: besom.types.Output[scala.collection.immutable.List[besom.api.googlenative.dataproc.v1.outputs.YarnApplicationResponse]]
) extends besom.CustomResource
/** Companion for [[Job]]: the user-facing constructor, the Pulumi type token, the decoder
 * givens the engine needs to materialize instances, and ergonomic field accessors on
 * `Output[Job]`.
 */
object Job extends besom.ResourceCompanion[Job]:
/** Resource constructor for Job.
*
* @param name [[besom.util.NonEmptyString]] The unique (stack-wise) name of the resource in Pulumi state (not on provider's side).
* NonEmptyString is inferred automatically from non-empty string literals, even when interpolated. If you encounter any
* issues with this, please try using `: NonEmptyString` type annotation. If you need to convert a dynamically generated
* string to NonEmptyString, use `NonEmptyString.apply` method - `NonEmptyString(str): Option[NonEmptyString]`.
*
* @param args [[JobArgs]] The configuration to use to create this resource.
*
* @param opts [[besom.CustomResourceOptions]] Resource options to use for this resource.
* Defaults to empty options. If you need to set some options, use [[besom.opts]] function to create them, for example:
*
* {{{
* val res = Job(
* "my-resource",
* JobArgs(...), // your args
* opts(provider = myProvider)
* )
* }}}
*/
// `opts` is a context function (`?=>`) so the `opts(...)` builder can see which resource
// variant (Custom vs Component) it is being used for; here it is applied with the
// Custom variant before being forwarded to the engine.
def apply(using ctx: besom.types.Context)(
name: besom.util.NonEmptyString,
args: JobArgs,
opts: besom.ResourceOptsVariant.Custom ?=> besom.CustomResourceOptions = besom.CustomResourceOptions()
): besom.types.Output[Job] =
ctx.readOrRegisterResource[Job, JobArgs]("google-native:dataproc/v1:Job", name, args, opts(using besom.ResourceOptsVariant.Custom))
// Pulumi type token for this resource; must match the literal passed to
// readOrRegisterResource above.
private[besom] def typeToken: besom.types.ResourceType = "google-native:dataproc/v1:Job"
// Decoder givens used by the besom engine to reconstruct Job values from the wire
// representation; both delegate to besom's internal derivation machinery.
given resourceDecoder(using besom.types.Context): besom.types.ResourceDecoder[Job] =
besom.internal.ResourceDecoder.derived[Job]
given decoder(using besom.types.Context): besom.types.Decoder[Job] =
besom.internal.Decoder.customResourceDecoder[Job]
// Lifts every Job field accessor onto Output[Job] via flatMap, so callers can write
// `job.status` instead of `job.flatMap(_.status)`. The `{}` structural type is the
// besom-codegen idiom for an anonymous given carrying only extension methods.
given outputOps: {} with
extension(output: besom.types.Output[Job])
def urn : besom.types.Output[besom.types.URN] = output.flatMap(_.urn)
def id : besom.types.Output[besom.types.ResourceId] = output.flatMap(_.id)
def done : besom.types.Output[Boolean] = output.flatMap(_.done)
def driverControlFilesUri : besom.types.Output[String] = output.flatMap(_.driverControlFilesUri)
def driverOutputResourceUri : besom.types.Output[String] = output.flatMap(_.driverOutputResourceUri)
def driverSchedulingConfig : besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.DriverSchedulingConfigResponse] = output.flatMap(_.driverSchedulingConfig)
def flinkJob : besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.FlinkJobResponse] = output.flatMap(_.flinkJob)
def hadoopJob : besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.HadoopJobResponse] = output.flatMap(_.hadoopJob)
def hiveJob : besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.HiveJobResponse] = output.flatMap(_.hiveJob)
def jobUuid : besom.types.Output[String] = output.flatMap(_.jobUuid)
def labels : besom.types.Output[scala.Predef.Map[String, String]] = output.flatMap(_.labels)
def pigJob : besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.PigJobResponse] = output.flatMap(_.pigJob)
def placement : besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.JobPlacementResponse] = output.flatMap(_.placement)
def prestoJob : besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.PrestoJobResponse] = output.flatMap(_.prestoJob)
def project : besom.types.Output[String] = output.flatMap(_.project)
def pysparkJob : besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.PySparkJobResponse] = output.flatMap(_.pysparkJob)
def reference : besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.JobReferenceResponse] = output.flatMap(_.reference)
def region : besom.types.Output[String] = output.flatMap(_.region)
def scheduling : besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.JobSchedulingResponse] = output.flatMap(_.scheduling)
def sparkJob : besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.SparkJobResponse] = output.flatMap(_.sparkJob)
def sparkRJob : besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.SparkRJobResponse] = output.flatMap(_.sparkRJob)
def sparkSqlJob : besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.SparkSqlJobResponse] = output.flatMap(_.sparkSqlJob)
def status : besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.JobStatusResponse] = output.flatMap(_.status)
def statusHistory : besom.types.Output[scala.collection.immutable.List[besom.api.googlenative.dataproc.v1.outputs.JobStatusResponse]] = output.flatMap(_.statusHistory)
def trinoJob : besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.TrinoJobResponse] = output.flatMap(_.trinoJob)
def yarnApplications : besom.types.Output[scala.collection.immutable.List[besom.api.googlenative.dataproc.v1.outputs.YarnApplicationResponse]] = output.flatMap(_.yarnApplications)
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy