
src/dataproc/v1/BatchArgs.scala
package besom.api.googlenative.dataproc.v1
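
/** Resolved input properties for a `dataproc/v1` Batch resource in the
  * google-native Besom provider. Every field is an `Output` of an optional
  * value; instances are built through the companion `BatchArgs.apply`, whose
  * `Input.Optional` parameters accept plain values, `Option`s, and `Output`s.
  */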
final case class BatchArgs private(
  batchId: besom.types.Output[scala.Option[String]],
  environmentConfig: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.inputs.EnvironmentConfigArgs]],
  labels: besom.types.Output[scala.Option[scala.Predef.Map[String, String]]],
  location: besom.types.Output[scala.Option[String]],
  project: besom.types.Output[scala.Option[String]],
  pysparkBatch: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.inputs.PySparkBatchArgs]],
  requestId: besom.types.Output[scala.Option[String]],
  runtimeConfig: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.inputs.RuntimeConfigArgs]],
  sparkBatch: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.inputs.SparkBatchArgs]],
  sparkRBatch: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.inputs.SparkRBatchArgs]],
  sparkSqlBatch: besom.types.Output[scala.Option[besom.api.googlenative.dataproc.v1.inputs.SparkSqlBatchArgs]]
)

object BatchArgs:
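  /** Builds a `BatchArgs` value, lifting each `Input.Optional` argument into an
    * `Output[Option[...]]` field via `asOptionOutput`. All arguments default to
    * `scala.None` and none are marked as secret.
    */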
  def apply(
    batchId: besom.types.Input.Optional[String] = scala.None,
    environmentConfig: besom.types.Input.Optional[besom.api.googlenative.dataproc.v1.inputs.EnvironmentConfigArgs] = scala.None,
    labels: besom.types.Input.Optional[scala.Predef.Map[String, besom.types.Input[String]]] = scala.None,
    location: besom.types.Input.Optional[String] = scala.None,
    project: besom.types.Input.Optional[String] = scala.None,
    pysparkBatch: besom.types.Input.Optional[besom.api.googlenative.dataproc.v1.inputs.PySparkBatchArgs] = scala.None,
    requestId: besom.types.Input.Optional[String] = scala.None,
    runtimeConfig: besom.types.Input.Optional[besom.api.googlenative.dataproc.v1.inputs.RuntimeConfigArgs] = scala.None,
    sparkBatch: besom.types.Input.Optional[besom.api.googlenative.dataproc.v1.inputs.SparkBatchArgs] = scala.None,
    sparkRBatch: besom.types.Input.Optional[besom.api.googlenative.dataproc.v1.inputs.SparkRBatchArgs] = scala.None,
    sparkSqlBatch: besom.types.Input.Optional[besom.api.googlenative.dataproc.v1.inputs.SparkSqlBatchArgs] = scala.None
  )(using besom.types.Context): BatchArgs =
    new BatchArgs(
      batchId = batchId.asOptionOutput(isSecret = false),
      environmentConfig = environmentConfig.asOptionOutput(isSecret = false),
      labels = labels.asOptionOutput(isSecret = false),
      location = location.asOptionOutput(isSecret = false),
      project = project.asOptionOutput(isSecret = false),
      pysparkBatch = pysparkBatch.asOptionOutput(isSecret = false),
      requestId = requestId.asOptionOutput(isSecret = false),
      runtimeConfig = runtimeConfig.asOptionOutput(isSecret = false),
      sparkBatch = sparkBatch.asOptionOutput(isSecret = false),
      sparkRBatch = sparkRBatch.asOptionOutput(isSecret = false),
      sparkSqlBatch = sparkSqlBatch.asOptionOutput(isSecret = false)
    )

  given encoder(using besom.types.Context): besom.types.Encoder[BatchArgs] =
    besom.internal.Encoder.derived[BatchArgs]

  given argsEncoder(using besom.types.Context): besom.types.ArgsEncoder[BatchArgs] =
    besom.internal.ArgsEncoder.derived[BatchArgs]
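
A minimal usage sketch, assuming the usual Besom program shape (`import besom.*`, `Pulumi.run`, `Stack.exports`): it only exercises the scalar and map fields defined on `BatchArgs` above. The batch id, location, project, labels, and request id values are hypothetical, and the resource constructor that would ultimately consume these args is not shown here.

import besom.*
import besom.api.googlenative.dataproc.v1.BatchArgs

@main def main = Pulumi.run {
  // Only fields defined on BatchArgs are set; the remaining optional
  // parameters keep their scala.None defaults.
  val args = BatchArgs(
    batchId = "daily-report-batch",   // hypothetical batch id
    location = "us-central1",         // hypothetical region
    project = "my-gcp-project",       // hypothetical project id
    labels = Map("env" -> "dev"),
    requestId = "req-123"             // hypothetical idempotency token
  )

  // Export one of the lifted Output[Option[String]] fields for inspection.
  Stack.exports(batchLocation = args.location)
}

Because each parameter is an `Input.Optional`, the same call site also accepts `Option` values or `Output`s (for example, a `location` coming from another resource's output) without any explicit wrapping.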