
package besom.api.googlenative.dataproc.v1
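
/** Fields returned when looking up a Dataproc batch workload via the
  * google-native `dataproc/v1` provider.
  */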
final case class GetBatchResult private(
  createTime: String,
  creator: String,
  environmentConfig: besom.api.googlenative.dataproc.v1.outputs.EnvironmentConfigResponse,
  labels: scala.Predef.Map[String, String],
  name: String,
  operation: String,
  pysparkBatch: besom.api.googlenative.dataproc.v1.outputs.PySparkBatchResponse,
  runtimeConfig: besom.api.googlenative.dataproc.v1.outputs.RuntimeConfigResponse,
  runtimeInfo: besom.api.googlenative.dataproc.v1.outputs.RuntimeInfoResponse,
  sparkBatch: besom.api.googlenative.dataproc.v1.outputs.SparkBatchResponse,
  sparkRBatch: besom.api.googlenative.dataproc.v1.outputs.SparkRBatchResponse,
  sparkSqlBatch: besom.api.googlenative.dataproc.v1.outputs.SparkSqlBatchResponse,
  state: String,
  stateHistory: scala.collection.immutable.List[besom.api.googlenative.dataproc.v1.outputs.StateHistoryResponse],
  stateMessage: String,
  stateTime: String,
  uuid: String
)
object GetBatchResult:
  given decoder(using besom.types.Context): besom.types.Decoder[GetBatchResult] =
    besom.internal.Decoder.derived[GetBatchResult]
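
  // Dot-style field projections for Output[GetBatchResult]; each one simply maps over the Output.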
  given outputOps: {} with
    extension(output: besom.types.Output[GetBatchResult])
      def createTime: besom.types.Output[String] = output.map(_.createTime)
      def creator: besom.types.Output[String] = output.map(_.creator)
      def environmentConfig: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.EnvironmentConfigResponse] = output.map(_.environmentConfig)
      def labels: besom.types.Output[scala.Predef.Map[String, String]] = output.map(_.labels)
      def name: besom.types.Output[String] = output.map(_.name)
      def operation: besom.types.Output[String] = output.map(_.operation)
      def pysparkBatch: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.PySparkBatchResponse] = output.map(_.pysparkBatch)
      def runtimeConfig: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.RuntimeConfigResponse] = output.map(_.runtimeConfig)
      def runtimeInfo: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.RuntimeInfoResponse] = output.map(_.runtimeInfo)
      def sparkBatch: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.SparkBatchResponse] = output.map(_.sparkBatch)
      def sparkRBatch: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.SparkRBatchResponse] = output.map(_.sparkRBatch)
      def sparkSqlBatch: besom.types.Output[besom.api.googlenative.dataproc.v1.outputs.SparkSqlBatchResponse] = output.map(_.sparkSqlBatch)
      def state: besom.types.Output[String] = output.map(_.state)
      def stateHistory: besom.types.Output[scala.collection.immutable.List[besom.api.googlenative.dataproc.v1.outputs.StateHistoryResponse]] = output.map(_.stateHistory)
      def stateMessage: besom.types.Output[String] = output.map(_.stateMessage)
      def stateTime: besom.types.Output[String] = output.map(_.stateTime)
      def uuid: besom.types.Output[String] = output.map(_.uuid)
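
  // The same projections for Output[scala.Option[GetBatchResult]], mapping through the Option as well.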
  given optionOutputOps: {} with
    extension(output: besom.types.Output[scala.Option[GetBatchResult]])
      def createTime: besom.types.Output[scala.Option[String]] = output.map(_.map(_.createTime))
      def creator: besom.types.Output[scala.Option[String]] = output.map(_.map(_.creator))
      def environmentConfig: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.EnvironmentConfigResponse]] = output.map(_.map(_.environmentConfig))
      def labels: besom.types.Output[scala.Option[scala.Predef.Map[String, String]]] = output.map(_.map(_.labels))
      def name: besom.types.Output[scala.Option[String]] = output.map(_.map(_.name))
      def operation: besom.types.Output[scala.Option[String]] = output.map(_.map(_.operation))
      def pysparkBatch: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.PySparkBatchResponse]] = output.map(_.map(_.pysparkBatch))
      def runtimeConfig: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.RuntimeConfigResponse]] = output.map(_.map(_.runtimeConfig))
      def runtimeInfo: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.RuntimeInfoResponse]] = output.map(_.map(_.runtimeInfo))
      def sparkBatch: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.SparkBatchResponse]] = output.map(_.map(_.sparkBatch))
      def sparkRBatch: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.SparkRBatchResponse]] = output.map(_.map(_.sparkRBatch))
      def sparkSqlBatch: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.outputs.SparkSqlBatchResponse]] = output.map(_.map(_.sparkSqlBatch))
      def state: besom.types.Output[scala.Option[String]] = output.map(_.map(_.state))
      def stateHistory: besom.types.Output[scala.Option[scala.collection.immutable.List[besom.api.googlenative.dataproc.v1.outputs.StateHistoryResponse]]] = output.map(_.map(_.stateHistory))
      def stateMessage: besom.types.Output[scala.Option[String]] = output.map(_.map(_.stateMessage))
      def stateTime: besom.types.Output[scala.Option[String]] = output.map(_.map(_.stateTime))
      def uuid: besom.types.Output[scala.Option[String]] = output.map(_.map(_.uuid))
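
// Usage sketch (not part of the generated source): the outputOps extension above lets
// callers project single fields from an Output[GetBatchResult] without unwrapping it.
// The helper names below are illustrative only; in a real program the Output would
// typically come from this provider's corresponding `getBatch` source function
// (name assumed here; check the generated functions for the exact signature).
def batchState(batch: besom.types.Output[GetBatchResult]): besom.types.Output[String] =
  batch.state // resolves via the outputOps extension defined in the companion object

def batchSummary(batch: besom.types.Output[GetBatchResult]): besom.types.Output[String] =
  // plain `map` on the Output is the alternative when several fields are combined at once
  batch.map(b => s"${b.name}: ${b.state} (uuid ${b.uuid})")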