All downloads are free. Search and download functionality uses the official Maven repository.

src.dataproc.v1beta2.JobArgs.scala Maven / Gradle / Ivy

There is a newer version: 0.32.0-core.0.4
Show newest version
package besom.api.googlenative.dataproc.v1beta2

/** Arguments for a Google Cloud Dataproc (v1beta2) Job resource.
  *
  * Each `*Job` field selects a workload type (Hadoop, Hive, Pig, Presto,
  * PySpark, Spark, SparkR, or Spark SQL). NOTE(review): presumably exactly
  * one of them should be set — that constraint is enforced by the Dataproc
  * API, not by this type; confirm against the service documentation.
  *
  * The constructor is private: instances are built via the companion
  * `JobArgs.apply`, which lifts plain inputs into `Output` values.
  *
  * @param labels optional key/value labels attached to the job
  * @param placement required cluster placement configuration for the job
  * @param project optional project ID; presumably falls back to the
  *        provider's default project when absent — verify with API docs
  * @param reference optional job reference (job ID / project scoping)
  * @param region required Dataproc region the job runs in
  * @param requestId optional token — NOTE(review): likely used by the API
  *        for request deduplication; confirm semantics before relying on it
  * @param scheduling optional job scheduling (e.g. restart) configuration
  */
final case class JobArgs private(
  hadoopJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1beta2.inputs.HadoopJobArgs]],
  hiveJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1beta2.inputs.HiveJobArgs]],
  labels: besom.types.Output[scala.Option[scala.Predef.Map[String, String]]],
  pigJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1beta2.inputs.PigJobArgs]],
  placement: besom.types.Output[besom.api.googlenative.dataproc.v1beta2.inputs.JobPlacementArgs],
  prestoJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1beta2.inputs.PrestoJobArgs]],
  project: besom.types.Output[scala.Option[String]],
  pysparkJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1beta2.inputs.PySparkJobArgs]],
  reference: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1beta2.inputs.JobReferenceArgs]],
  region: besom.types.Output[String],
  requestId: besom.types.Output[scala.Option[String]],
  scheduling: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1beta2.inputs.JobSchedulingArgs]],
  sparkJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1beta2.inputs.SparkJobArgs]],
  sparkRJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1beta2.inputs.SparkRJobArgs]],
  sparkSqlJob: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1beta2.inputs.SparkSqlJobArgs]]
)

object JobArgs:
  /** Builds a [[JobArgs]], lifting every raw input into a besom `Output`
    * (none of them marked secret).
    *
    * `placement` and `region` are required; every other input defaults to
    * `None`, so call sites typically pass named arguments.
    */
  def apply(
    hadoopJob: besom.types.Input.Optional[besom.api.googlenative.dataproc.v1beta2.inputs.HadoopJobArgs] = scala.None,
    hiveJob: besom.types.Input.Optional[besom.api.googlenative.dataproc.v1beta2.inputs.HiveJobArgs] = scala.None,
    labels: besom.types.Input.Optional[scala.Predef.Map[String, besom.types.Input[String]]] = scala.None,
    pigJob: besom.types.Input.Optional[besom.api.googlenative.dataproc.v1beta2.inputs.PigJobArgs] = scala.None,
    placement: besom.types.Input[besom.api.googlenative.dataproc.v1beta2.inputs.JobPlacementArgs],
    prestoJob: besom.types.Input.Optional[besom.api.googlenative.dataproc.v1beta2.inputs.PrestoJobArgs] = scala.None,
    project: besom.types.Input.Optional[String] = scala.None,
    pysparkJob: besom.types.Input.Optional[besom.api.googlenative.dataproc.v1beta2.inputs.PySparkJobArgs] = scala.None,
    reference: besom.types.Input.Optional[besom.api.googlenative.dataproc.v1beta2.inputs.JobReferenceArgs] = scala.None,
    region: besom.types.Input[String],
    requestId: besom.types.Input.Optional[String] = scala.None,
    scheduling: besom.types.Input.Optional[besom.api.googlenative.dataproc.v1beta2.inputs.JobSchedulingArgs] = scala.None,
    sparkJob: besom.types.Input.Optional[besom.api.googlenative.dataproc.v1beta2.inputs.SparkJobArgs] = scala.None,
    sparkRJob: besom.types.Input.Optional[besom.api.googlenative.dataproc.v1beta2.inputs.SparkRJobArgs] = scala.None,
    sparkSqlJob: besom.types.Input.Optional[besom.api.googlenative.dataproc.v1beta2.inputs.SparkSqlJobArgs] = scala.None
  )(using besom.types.Context): JobArgs =
    // Lift each input in field-declaration order (matching the original
    // evaluation order), then assemble the case class positionally.
    val hadoopJobOut   = hadoopJob.asOptionOutput(isSecret = false)
    val hiveJobOut     = hiveJob.asOptionOutput(isSecret = false)
    val labelsOut      = labels.asOptionOutput(isSecret = false)
    val pigJobOut      = pigJob.asOptionOutput(isSecret = false)
    val placementOut   = placement.asOutput(isSecret = false)
    val prestoJobOut   = prestoJob.asOptionOutput(isSecret = false)
    val projectOut     = project.asOptionOutput(isSecret = false)
    val pysparkJobOut  = pysparkJob.asOptionOutput(isSecret = false)
    val referenceOut   = reference.asOptionOutput(isSecret = false)
    val regionOut      = region.asOutput(isSecret = false)
    val requestIdOut   = requestId.asOptionOutput(isSecret = false)
    val schedulingOut  = scheduling.asOptionOutput(isSecret = false)
    val sparkJobOut    = sparkJob.asOptionOutput(isSecret = false)
    val sparkRJobOut   = sparkRJob.asOptionOutput(isSecret = false)
    val sparkSqlJobOut = sparkSqlJob.asOptionOutput(isSecret = false)
    new JobArgs(
      hadoopJobOut,
      hiveJobOut,
      labelsOut,
      pigJobOut,
      placementOut,
      prestoJobOut,
      projectOut,
      pysparkJobOut,
      referenceOut,
      regionOut,
      requestIdOut,
      schedulingOut,
      sparkJobOut,
      sparkRJobOut,
      sparkSqlJobOut
    )

  /** Derived value encoder for [[JobArgs]], used by the besom runtime. */
  given encoder(using besom.types.Context): besom.types.Encoder[JobArgs] =
    besom.internal.Encoder.derived[JobArgs]

  /** Derived resource-arguments encoder for [[JobArgs]]. */
  given argsEncoder(using besom.types.Context): besom.types.ArgsEncoder[JobArgs] =
    besom.internal.ArgsEncoder.derived[JobArgs]






© 2015 - 2025 Weber Informatics LLC | Privacy Policy