
// src/dataproc/v1/GetJobResult.scala — Maven / Gradle / Ivy artifact listing header (kept as a comment so the file compiles)
package besom.api.googlenative.dataproc.v1
/** Result type for the Dataproc v1 `getJob` lookup (generated besom/Pulumi wrapper).
  *
  * Each field mirrors one property of the Google Cloud Dataproc Job API response.
  * NOTE(review): the API shape suggests at most one of the `*Job` payload fields
  * (hadoopJob, sparkJob, pysparkJob, …) is populated per job, matching the job's
  * type — confirm against the Dataproc Jobs API reference.
  *
  * The constructor is `private`: instances are produced only by the decoder in the
  * companion object when besom deserializes engine results.
  */
final case class GetJobResult private(
done: Boolean,
driverControlFilesUri: String,
driverOutputResourceUri: String,
driverSchedulingConfig: besom.api.googlenative.dataproc.v1.outputs.DriverSchedulingConfigResponse,
flinkJob: besom.api.googlenative.dataproc.v1.outputs.FlinkJobResponse,
hadoopJob: besom.api.googlenative.dataproc.v1.outputs.HadoopJobResponse,
hiveJob: besom.api.googlenative.dataproc.v1.outputs.HiveJobResponse,
jobUuid: String,
labels: scala.Predef.Map[String, String],
pigJob: besom.api.googlenative.dataproc.v1.outputs.PigJobResponse,
placement: besom.api.googlenative.dataproc.v1.outputs.JobPlacementResponse,
prestoJob: besom.api.googlenative.dataproc.v1.outputs.PrestoJobResponse,
pysparkJob: besom.api.googlenative.dataproc.v1.outputs.PySparkJobResponse,
reference: besom.api.googlenative.dataproc.v1.outputs.JobReferenceResponse,
scheduling: besom.api.googlenative.dataproc.v1.outputs.JobSchedulingResponse,
sparkJob: besom.api.googlenative.dataproc.v1.outputs.SparkJobResponse,
sparkRJob: besom.api.googlenative.dataproc.v1.outputs.SparkRJobResponse,
sparkSqlJob: besom.api.googlenative.dataproc.v1.outputs.SparkSqlJobResponse,
status: besom.api.googlenative.dataproc.v1.outputs.JobStatusResponse,
statusHistory: scala.collection.immutable.List[besom.api.googlenative.dataproc.v1.outputs.JobStatusResponse],
trinoJob: besom.api.googlenative.dataproc.v1.outputs.TrinoJobResponse,
yarnApplications: scala.collection.immutable.List[besom.api.googlenative.dataproc.v1.outputs.YarnApplicationResponse]
)
object GetJobResult:
  /** Decoder used by besom to deserialize engine results into [[GetJobResult]]. */
  given decoder(using besom.types.Context): besom.types.Decoder[GetJobResult] =
    besom.internal.Decoder.derived[GetJobResult]

  /** Field projections lifted onto `Output[GetJobResult]`, letting callers access
    * individual fields (via `output.map`) without unwrapping the Output themselves.
    */
  given outputOps: {} with
    extension(output: besom.types.Output[GetJobResult])
      def done: besom.types.Output[Boolean] = output.map(_.done)
      def driverControlFilesUri: besom.types.Output[String] = output.map(_.driverControlFilesUri)
      def driverOutputResourceUri: besom.types.Output[String] = output.map(_.driverOutputResourceUri)
      def driverSchedulingConfig: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.DriverSchedulingConfigResponse] = output.map(_.driverSchedulingConfig)
      def flinkJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.FlinkJobResponse] = output.map(_.flinkJob)
      def hadoopJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.HadoopJobResponse] = output.map(_.hadoopJob)
      def hiveJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.HiveJobResponse] = output.map(_.hiveJob)
      def jobUuid: besom.types.Output[String] = output.map(_.jobUuid)
      def labels: besom.types.Output[scala.Predef.Map[String, String]] = output.map(_.labels)
      def pigJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.PigJobResponse] = output.map(_.pigJob)
      def placement: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.JobPlacementResponse] = output.map(_.placement)
      def prestoJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.PrestoJobResponse] = output.map(_.prestoJob)
      def pysparkJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.PySparkJobResponse] = output.map(_.pysparkJob)
      def reference: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.JobReferenceResponse] = output.map(_.reference)
      def scheduling: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.JobSchedulingResponse] = output.map(_.scheduling)
      def sparkJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.SparkJobResponse] = output.map(_.sparkJob)
      def sparkRJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.SparkRJobResponse] = output.map(_.sparkRJob)
      def sparkSqlJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.SparkSqlJobResponse] = output.map(_.sparkSqlJob)
      def status: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.JobStatusResponse] = output.map(_.status)
      def statusHistory: besom.types.Output[scala.collection.immutable.List[besom.api.googlenative.dataproc.v1.outputs.JobStatusResponse]] = output.map(_.statusHistory)
      def trinoJob: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.TrinoJobResponse] = output.map(_.trinoJob)
      def yarnApplications: besom.types.Output[scala.collection.immutable.List[besom.api.googlenative.dataproc.v1.outputs.YarnApplicationResponse]] = output.map(_.yarnApplications)

  /** Same projections for `Output[Option[GetJobResult]]`; each accessor maps
    * through both the Output and the Option, yielding `Output[Option[A]]`.
    */
  given optionOutputOps: {} with
    extension(output: besom.types.Output[scala.Option[GetJobResult]])
      def done: besom.types.Output[scala.Option[Boolean]] = output.map(_.map(_.done))
      def driverControlFilesUri: besom.types.Output[scala.Option[String]] = output.map(_.map(_.driverControlFilesUri))
      def driverOutputResourceUri: besom.types.Output[scala.Option[String]] = output.map(_.map(_.driverOutputResourceUri))
      def driverSchedulingConfig: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.DriverSchedulingConfigResponse]] = output.map(_.map(_.driverSchedulingConfig))
      def flinkJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.FlinkJobResponse]] = output.map(_.map(_.flinkJob))
      def hadoopJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.HadoopJobResponse]] = output.map(_.map(_.hadoopJob))
      def hiveJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.HiveJobResponse]] = output.map(_.map(_.hiveJob))
      def jobUuid: besom.types.Output[scala.Option[String]] = output.map(_.map(_.jobUuid))
      def labels: besom.types.Output[scala.Option[scala.Predef.Map[String, String]]] = output.map(_.map(_.labels))
      def pigJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.PigJobResponse]] = output.map(_.map(_.pigJob))
      def placement: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.JobPlacementResponse]] = output.map(_.map(_.placement))
      def prestoJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.PrestoJobResponse]] = output.map(_.map(_.prestoJob))
      def pysparkJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.PySparkJobResponse]] = output.map(_.map(_.pysparkJob))
      def reference: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.JobReferenceResponse]] = output.map(_.map(_.reference))
      def scheduling: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.JobSchedulingResponse]] = output.map(_.map(_.scheduling))
      def sparkJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.SparkJobResponse]] = output.map(_.map(_.sparkJob))
      def sparkRJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.SparkRJobResponse]] = output.map(_.map(_.sparkRJob))
      def sparkSqlJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.SparkSqlJobResponse]] = output.map(_.map(_.sparkSqlJob))
      def status: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.JobStatusResponse]] = output.map(_.map(_.status))
      def statusHistory: besom.types.Output[scala.Option[scala.collection.immutable.List[besom.api.googlenative.dataproc.v1.outputs.JobStatusResponse]]] = output.map(_.map(_.statusHistory))
      def trinoJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.TrinoJobResponse]] = output.map(_.map(_.trinoJob))
      def yarnApplications: besom.types.Output[scala.Option[scala.collection.immutable.List[besom.api.googlenative.dataproc.v1.outputs.YarnApplicationResponse]]] = output.map(_.map(_.yarnApplications))
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy (artifact-browser footer, kept as a comment so the file compiles)