com.pulumi.gcp.dataproc.kotlin.WorkflowTemplateArgs.kt

@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.gcp.dataproc.kotlin

import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.dataproc.WorkflowTemplateArgs.builder
import com.pulumi.gcp.dataproc.kotlin.inputs.WorkflowTemplateJobArgs
import com.pulumi.gcp.dataproc.kotlin.inputs.WorkflowTemplateJobArgsBuilder
import com.pulumi.gcp.dataproc.kotlin.inputs.WorkflowTemplateParameterArgs
import com.pulumi.gcp.dataproc.kotlin.inputs.WorkflowTemplateParameterArgsBuilder
import com.pulumi.gcp.dataproc.kotlin.inputs.WorkflowTemplatePlacementArgs
import com.pulumi.gcp.dataproc.kotlin.inputs.WorkflowTemplatePlacementArgsBuilder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Deprecated
import kotlin.Int
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName

/**
 * A Workflow Template is a reusable workflow configuration. It defines a graph of jobs with information on where to run those jobs.
 * ## Example Usage
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const template = new gcp.dataproc.WorkflowTemplate("template", {
 *     name: "template-example",
 *     location: "us-central1",
 *     placement: {
 *         managedCluster: {
 *             clusterName: "my-cluster",
 *             config: {
 *                 gceClusterConfig: {
 *                     zone: "us-central1-a",
 *                     tags: [
 *                         "foo",
 *                         "bar",
 *                     ],
 *                 },
 *                 masterConfig: {
 *                     numInstances: 1,
 *                     machineType: "n1-standard-1",
 *                     diskConfig: {
 *                         bootDiskType: "pd-ssd",
 *                         bootDiskSizeGb: 15,
 *                     },
 *                 },
 *                 workerConfig: {
 *                     numInstances: 3,
 *                     machineType: "n1-standard-2",
 *                     diskConfig: {
 *                         bootDiskSizeGb: 10,
 *                         numLocalSsds: 2,
 *                     },
 *                 },
 *                 secondaryWorkerConfig: {
 *                     numInstances: 2,
 *                 },
 *                 softwareConfig: {
 *                     imageVersion: "2.0.35-debian10",
 *                 },
 *             },
 *         },
 *     },
 *     jobs: [
 *         {
 *             stepId: "someJob",
 *             sparkJob: {
 *                 mainClass: "SomeClass",
 *             },
 *         },
 *         {
 *             stepId: "otherJob",
 *             prerequisiteStepIds: ["someJob"],
 *             prestoJob: {
 *                 queryFileUri: "someuri",
 *             },
 *         },
 *     ],
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * template = gcp.dataproc.WorkflowTemplate("template",
 *     name="template-example",
 *     location="us-central1",
 *     placement=gcp.dataproc.WorkflowTemplatePlacementArgs(
 *         managed_cluster=gcp.dataproc.WorkflowTemplatePlacementManagedClusterArgs(
 *             cluster_name="my-cluster",
 *             config=gcp.dataproc.WorkflowTemplatePlacementManagedClusterConfigArgs(
 *                 gce_cluster_config=gcp.dataproc.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs(
 *                     zone="us-central1-a",
 *                     tags=[
 *                         "foo",
 *                         "bar",
 *                     ],
 *                 ),
 *                 master_config=gcp.dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs(
 *                     num_instances=1,
 *                     machine_type="n1-standard-1",
 *                     disk_config=gcp.dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs(
 *                         boot_disk_type="pd-ssd",
 *                         boot_disk_size_gb=15,
 *                     ),
 *                 ),
 *                 worker_config=gcp.dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs(
 *                     num_instances=3,
 *                     machine_type="n1-standard-2",
 *                     disk_config=gcp.dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs(
 *                         boot_disk_size_gb=10,
 *                         num_local_ssds=2,
 *                     ),
 *                 ),
 *                 secondary_worker_config=gcp.dataproc.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs(
 *                     num_instances=2,
 *                 ),
 *                 software_config=gcp.dataproc.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs(
 *                     image_version="2.0.35-debian10",
 *                 ),
 *             ),
 *         ),
 *     ),
 *     jobs=[
 *         gcp.dataproc.WorkflowTemplateJobArgs(
 *             step_id="someJob",
 *             spark_job=gcp.dataproc.WorkflowTemplateJobSparkJobArgs(
 *                 main_class="SomeClass",
 *             ),
 *         ),
 *         gcp.dataproc.WorkflowTemplateJobArgs(
 *             step_id="otherJob",
 *             prerequisite_step_ids=["someJob"],
 *             presto_job=gcp.dataproc.WorkflowTemplateJobPrestoJobArgs(
 *                 query_file_uri="someuri",
 *             ),
 *         ),
 *     ])
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var template = new Gcp.Dataproc.WorkflowTemplate("template", new()
 *     {
 *         Name = "template-example",
 *         Location = "us-central1",
 *         Placement = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementArgs
 *         {
 *             ManagedCluster = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterArgs
 *             {
 *                 ClusterName = "my-cluster",
 *                 Config = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigArgs
 *                 {
 *                     GceClusterConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs
 *                     {
 *                         Zone = "us-central1-a",
 *                         Tags = new[]
 *                         {
 *                             "foo",
 *                             "bar",
 *                         },
 *                     },
 *                     MasterConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs
 *                     {
 *                         NumInstances = 1,
 *                         MachineType = "n1-standard-1",
 *                         DiskConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs
 *                         {
 *                             BootDiskType = "pd-ssd",
 *                             BootDiskSizeGb = 15,
 *                         },
 *                     },
 *                     WorkerConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs
 *                     {
 *                         NumInstances = 3,
 *                         MachineType = "n1-standard-2",
 *                         DiskConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs
 *                         {
 *                             BootDiskSizeGb = 10,
 *                             NumLocalSsds = 2,
 *                         },
 *                     },
 *                     SecondaryWorkerConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs
 *                     {
 *                         NumInstances = 2,
 *                     },
 *                     SoftwareConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs
 *                     {
 *                         ImageVersion = "2.0.35-debian10",
 *                     },
 *                 },
 *             },
 *         },
 *         Jobs = new[]
 *         {
 *             new Gcp.Dataproc.Inputs.WorkflowTemplateJobArgs
 *             {
 *                 StepId = "someJob",
 *                 SparkJob = new Gcp.Dataproc.Inputs.WorkflowTemplateJobSparkJobArgs
 *                 {
 *                     MainClass = "SomeClass",
 *                 },
 *             },
 *             new Gcp.Dataproc.Inputs.WorkflowTemplateJobArgs
 *             {
 *                 StepId = "otherJob",
 *                 PrerequisiteStepIds = new[]
 *                 {
 *                     "someJob",
 *                 },
 *                 PrestoJob = new Gcp.Dataproc.Inputs.WorkflowTemplateJobPrestoJobArgs
 *                 {
 *                     QueryFileUri = "someuri",
 *                 },
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataproc"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := dataproc.NewWorkflowTemplate(ctx, "template", &dataproc.WorkflowTemplateArgs{
 * 			Name:     pulumi.String("template-example"),
 * 			Location: pulumi.String("us-central1"),
 * 			Placement: &dataproc.WorkflowTemplatePlacementArgs{
 * 				ManagedCluster: &dataproc.WorkflowTemplatePlacementManagedClusterArgs{
 * 					ClusterName: pulumi.String("my-cluster"),
 * 					Config: &dataproc.WorkflowTemplatePlacementManagedClusterConfigArgs{
 * 						GceClusterConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs{
 * 							Zone: pulumi.String("us-central1-a"),
 * 							Tags: pulumi.StringArray{
 * 								pulumi.String("foo"),
 * 								pulumi.String("bar"),
 * 							},
 * 						},
 * 						MasterConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs{
 * 							NumInstances: pulumi.Int(1),
 * 							MachineType:  pulumi.String("n1-standard-1"),
 * 							DiskConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs{
 * 								BootDiskType:   pulumi.String("pd-ssd"),
 * 								BootDiskSizeGb: pulumi.Int(15),
 * 							},
 * 						},
 * 						WorkerConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs{
 * 							NumInstances: pulumi.Int(3),
 * 							MachineType:  pulumi.String("n1-standard-2"),
 * 							DiskConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs{
 * 								BootDiskSizeGb: pulumi.Int(10),
 * 								NumLocalSsds:   pulumi.Int(2),
 * 							},
 * 						},
 * 						SecondaryWorkerConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs{
 * 							NumInstances: pulumi.Int(2),
 * 						},
 * 						SoftwareConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs{
 * 							ImageVersion: pulumi.String("2.0.35-debian10"),
 * 						},
 * 					},
 * 				},
 * 			},
 * 			Jobs: dataproc.WorkflowTemplateJobArray{
 * 				&dataproc.WorkflowTemplateJobArgs{
 * 					StepId: pulumi.String("someJob"),
 * 					SparkJob: &dataproc.WorkflowTemplateJobSparkJobArgs{
 * 						MainClass: pulumi.String("SomeClass"),
 * 					},
 * 				},
 * 				&dataproc.WorkflowTemplateJobArgs{
 * 					StepId: pulumi.String("otherJob"),
 * 					PrerequisiteStepIds: pulumi.StringArray{
 * 						pulumi.String("someJob"),
 * 					},
 * 					PrestoJob: &dataproc.WorkflowTemplateJobPrestoJobArgs{
 * 						QueryFileUri: pulumi.String("someuri"),
 * 					},
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.dataproc.WorkflowTemplate;
 * import com.pulumi.gcp.dataproc.WorkflowTemplateArgs;
 * import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementArgs;
 * import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterArgs;
 * import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigArgs;
 * import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs;
 * import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs;
 * import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs;
 * import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs;
 * import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs;
 * import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs;
 * import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs;
 * import com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobArgs;
 * import com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobSparkJobArgs;
 * import com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobPrestoJobArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var template = new WorkflowTemplate("template", WorkflowTemplateArgs.builder()
 *             .name("template-example")
 *             .location("us-central1")
 *             .placement(WorkflowTemplatePlacementArgs.builder()
 *                 .managedCluster(WorkflowTemplatePlacementManagedClusterArgs.builder()
 *                     .clusterName("my-cluster")
 *                     .config(WorkflowTemplatePlacementManagedClusterConfigArgs.builder()
 *                         .gceClusterConfig(WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs.builder()
 *                             .zone("us-central1-a")
 *                             .tags(
 *                                 "foo",
 *                                 "bar")
 *                             .build())
 *                         .masterConfig(WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs.builder()
 *                             .numInstances(1)
 *                             .machineType("n1-standard-1")
 *                             .diskConfig(WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs.builder()
 *                                 .bootDiskType("pd-ssd")
 *                                 .bootDiskSizeGb(15)
 *                                 .build())
 *                             .build())
 *                         .workerConfig(WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs.builder()
 *                             .numInstances(3)
 *                             .machineType("n1-standard-2")
 *                             .diskConfig(WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs.builder()
 *                                 .bootDiskSizeGb(10)
 *                                 .numLocalSsds(2)
 *                                 .build())
 *                             .build())
 *                         .secondaryWorkerConfig(WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs.builder()
 *                             .numInstances(2)
 *                             .build())
 *                         .softwareConfig(WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs.builder()
 *                             .imageVersion("2.0.35-debian10")
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .build())
 *             .jobs(
 *                 WorkflowTemplateJobArgs.builder()
 *                     .stepId("someJob")
 *                     .sparkJob(WorkflowTemplateJobSparkJobArgs.builder()
 *                         .mainClass("SomeClass")
 *                         .build())
 *                     .build(),
 *                 WorkflowTemplateJobArgs.builder()
 *                     .stepId("otherJob")
 *                     .prerequisiteStepIds("someJob")
 *                     .prestoJob(WorkflowTemplateJobPrestoJobArgs.builder()
 *                         .queryFileUri("someuri")
 *                         .build())
 *                     .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   template:
 *     type: gcp:dataproc:WorkflowTemplate
 *     properties:
 *       name: template-example
 *       location: us-central1
 *       placement:
 *         managedCluster:
 *           clusterName: my-cluster
 *           config:
 *             gceClusterConfig:
 *               zone: us-central1-a
 *               tags:
 *                 - foo
 *                 - bar
 *             masterConfig:
 *               numInstances: 1
 *               machineType: n1-standard-1
 *               diskConfig:
 *                 bootDiskType: pd-ssd
 *                 bootDiskSizeGb: 15
 *             workerConfig:
 *               numInstances: 3
 *               machineType: n1-standard-2
 *               diskConfig:
 *                 bootDiskSizeGb: 10
 *                 numLocalSsds: 2
 *             secondaryWorkerConfig:
 *               numInstances: 2
 *             softwareConfig:
 *               imageVersion: 2.0.35-debian10
 *       jobs:
 *         - stepId: someJob
 *           sparkJob:
 *             mainClass: SomeClass
 *         - stepId: otherJob
 *           prerequisiteStepIds:
 *             - someJob
 *           prestoJob:
 *             queryFileUri: someuri
 * ```
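 * For this Kotlin SDK itself, a minimal sketch of the same program, assuming the generated top-level `workflowTemplate` resource function and the `Pulumi.run` entry point from the pulumi-kotlin codegen conventions (nested builder names follow the same pattern and are not confirmed by this file):
 * ```kotlin
 * import com.pulumi.gcp.dataproc.kotlin.workflowTemplate
 * import com.pulumi.kotlin.Pulumi
 * fun main() {
 *     Pulumi.run { ctx ->
 *         val template = workflowTemplate("template") {
 *             args {
 *                 name("template-example")
 *                 location("us-central1")
 *                 placement {
 *                     managedCluster {
 *                         clusterName("my-cluster")
 *                         config {
 *                             gceClusterConfig {
 *                                 zone("us-central1-a")
 *                                 tags("foo", "bar")
 *                             }
 *                             masterConfig {
 *                                 numInstances(1)
 *                                 machineType("n1-standard-1")
 *                                 diskConfig {
 *                                     bootDiskType("pd-ssd")
 *                                     bootDiskSizeGb(15)
 *                                 }
 *                             }
 *                             workerConfig {
 *                                 numInstances(3)
 *                                 machineType("n1-standard-2")
 *                                 diskConfig {
 *                                     bootDiskSizeGb(10)
 *                                     numLocalSsds(2)
 *                                 }
 *                             }
 *                             secondaryWorkerConfig {
 *                                 numInstances(2)
 *                             }
 *                             softwareConfig {
 *                                 imageVersion("2.0.35-debian10")
 *                             }
 *                         }
 *                     }
 *                 }
 *                 jobs(
 *                     {
 *                         stepId("someJob")
 *                         sparkJob { mainClass("SomeClass") }
 *                     },
 *                     {
 *                         stepId("otherJob")
 *                         prerequisiteStepIds("someJob")
 *                         prestoJob { queryFileUri("someuri") }
 *                     },
 *                 )
 *             }
 *         }
 *     }
 * }
 * ```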
 * 
 * ## Import
 * WorkflowTemplate can be imported using any of these accepted formats:
 * * `projects/{{project}}/locations/{{location}}/workflowTemplates/{{name}}`
 * * `{{project}}/{{location}}/{{name}}`
 * * `{{location}}/{{name}}`
 * When using the `pulumi import` command, WorkflowTemplate can be imported using one of the formats above. For example:
 * ```sh
 * $ pulumi import gcp:dataproc/workflowTemplate:WorkflowTemplate default projects/{{project}}/locations/{{location}}/workflowTemplates/{{name}}
 * ```
 * ```sh
 * $ pulumi import gcp:dataproc/workflowTemplate:WorkflowTemplate default {{project}}/{{location}}/{{name}}
 * ```
 * ```sh
 * $ pulumi import gcp:dataproc/workflowTemplate:WorkflowTemplate default {{location}}/{{name}}
 * ```
 * @property dagTimeout Optional. Timeout duration for the DAG of jobs, expressed in seconds (see [JSON representation of
 * duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). The timeout duration must be from 10
 * minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at
 * the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running
 * on a [managed
 * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
 * the cluster is deleted.
 * @property jobs Required. The Directed Acyclic Graph of Jobs to submit.
 * @property labels Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created
 * by the workflow instance. Label **keys** must contain 1 to 63 characters, and must conform to [RFC
 * 1035](https://www.ietf.org/rfc/rfc1035.txt). Label **values** may be empty, but, if present, must contain 1 to 63
 * characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
 * associated with a template. **Note**: This field is non-authoritative, and will only manage the labels present in your
 * configuration. Please refer to the field `effective_labels` for all of the labels present on the resource.
 * @property location The location for the resource
 * @property name Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names.
 * * For `projects.regions.workflowTemplates`, the resource name of the template has the following format: `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
 * * For `projects.locations.workflowTemplates`, the resource name of the template has the following format: `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
 * @property parameters Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided
 * when the template is instantiated.
 * @property placement Required. WorkflowTemplate scheduling information.
 * @property project The project for the resource
 * @property version Output only. The current version of this workflow template.
 */
public data class WorkflowTemplateArgs(
    public val dagTimeout: Output<String>? = null,
    public val jobs: Output<List<WorkflowTemplateJobArgs>>? = null,
    public val labels: Output<Map<String, String>>? = null,
    public val location: Output<String>? = null,
    public val name: Output<String>? = null,
    public val parameters: Output<List<WorkflowTemplateParameterArgs>>? = null,
    public val placement: Output<WorkflowTemplatePlacementArgs>? = null,
    public val project: Output<String>? = null,
    @Deprecated(
        message = """
  version is not useful as a configurable field, and will be removed in the future.
  """,
    )
    public val version: Output<Int>? = null,
) : ConvertibleToJava<com.pulumi.gcp.dataproc.WorkflowTemplateArgs> {
    override fun toJava(): com.pulumi.gcp.dataproc.WorkflowTemplateArgs =
        com.pulumi.gcp.dataproc.WorkflowTemplateArgs.builder()
            .dagTimeout(dagTimeout?.applyValue({ args0 -> args0 }))
            .jobs(jobs?.applyValue({ args0 -> args0.map({ args0 -> args0.let({ args0 -> args0.toJava() }) }) }))
            .labels(labels?.applyValue({ args0 -> args0.map({ args0 -> args0.key.to(args0.value) }).toMap() }))
            .location(location?.applyValue({ args0 -> args0 }))
            .name(name?.applyValue({ args0 -> args0 }))
            .parameters(
                parameters?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.let({ args0 ->
                            args0.toJava()
                        })
                    })
                }),
            )
            .placement(placement?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .project(project?.applyValue({ args0 -> args0 }))
            .version(version?.applyValue({ args0 -> args0 })).build()
}
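
/*
 * Minimal direct-construction sketch: the data class above can be built without
 * the DSL by wrapping plain values in `Output.of`; `toJava()` then bridges to the
 * underlying Java SDK type that the Pulumi engine consumes.
 *
 *     val args = WorkflowTemplateArgs(
 *         location = Output.of("us-central1"),
 *         dagTimeout = Output.of("1800s"), // 30-minute DAG timeout
 *     )
 *     val javaArgs = args.toJava()
 */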

/**
 * Builder for [WorkflowTemplateArgs].
 */
@PulumiTagMarker
public class WorkflowTemplateArgsBuilder internal constructor() {
    private var dagTimeout: Output<String>? = null

    private var jobs: Output<List<WorkflowTemplateJobArgs>>? = null

    private var labels: Output<Map<String, String>>? = null

    private var location: Output<String>? = null

    private var name: Output<String>? = null

    private var parameters: Output<List<WorkflowTemplateParameterArgs>>? = null

    private var placement: Output<WorkflowTemplatePlacementArgs>? = null

    private var project: Output<String>? = null

    private var version: Output<Int>? = null

    /**
     * @param value Optional. Timeout duration for the DAG of jobs, expressed in seconds (see [JSON representation of
     * duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). The timeout duration must be from 10
     * minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at
     * the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running
     * on a [managed
     * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
     * the cluster is deleted.
     */
    @JvmName("tvhfjnqysrlvvmtn")
    public suspend fun dagTimeout(`value`: Output<String>) {
        this.dagTimeout = value
    }

    /**
     * @param value Required. The Directed Acyclic Graph of Jobs to submit.
     */
    @JvmName("avxgbhhjliemrtol")
    public suspend fun jobs(`value`: Output<List<WorkflowTemplateJobArgs>>) {
        this.jobs = value
    }

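    /**
     * @param values Required. The Directed Acyclic Graph of Jobs to submit.
     */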
    @JvmName("pxtnsermtxisdnpb")
    public suspend fun jobs(vararg values: Output<WorkflowTemplateJobArgs>) {
        this.jobs = Output.all(values.asList())
    }

    /**
     * @param values Required. The Directed Acyclic Graph of Jobs to submit.
     */
    @JvmName("vvbsymcoosvopygx")
    public suspend fun jobs(values: List<Output<WorkflowTemplateJobArgs>>) {
        this.jobs = Output.all(values)
    }

    /**
     * @param value Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created
     * by the workflow instance. Label **keys** must contain 1 to 63 characters, and must conform to [RFC
     * 1035](https://www.ietf.org/rfc/rfc1035.txt). Label **values** may be empty, but, if present, must contain 1 to 63
     * characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
     * associated with a template. **Note**: This field is non-authoritative, and will only manage the labels present in your
     * configuration. Please refer to the field `effective_labels` for all of the labels present on the resource.
     */
    @JvmName("qqrfnpdkpfoonpgb")
    public suspend fun labels(`value`: Output<Map<String, String>>) {
        this.labels = value
    }

    /**
     * @param value The location for the resource
     */
    @JvmName("eggbuqhbaxpmpakp")
    public suspend fun location(`value`: Output<String>) {
        this.location = value
    }

    /**
     * @param value Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names.
     * * For `projects.regions.workflowTemplates`, the resource name of the template has the following format: `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
     * * For `projects.locations.workflowTemplates`, the resource name of the template has the following format: `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
     */
    @JvmName("wukvuotiedsgyoew")
    public suspend fun name(`value`: Output<String>) {
        this.name = value
    }

    /**
     * @param value Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided
     * when the template is instantiated.
     */
    @JvmName("rnvjnunpvpndbdkt")
    public suspend fun parameters(`value`: Output<List<WorkflowTemplateParameterArgs>>) {
        this.parameters = value
    }

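    /**
     * @param values Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided
     * when the template is instantiated.
     */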
    @JvmName("nftrvmttjuhkxuww")
    public suspend fun parameters(vararg values: Output<WorkflowTemplateParameterArgs>) {
        this.parameters = Output.all(values.asList())
    }

    /**
     * @param values Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided
     * when the template is instantiated.
     */
    @JvmName("poqdumdcjhhejcrn")
    public suspend fun parameters(values: List<Output<WorkflowTemplateParameterArgs>>) {
        this.parameters = Output.all(values)
    }

    /**
     * @param value Required. WorkflowTemplate scheduling information.
     */
    @JvmName("osamtqkpvchrlqoc")
    public suspend fun placement(`value`: Output<WorkflowTemplatePlacementArgs>) {
        this.placement = value
    }

    /**
     * @param value The project for the resource
     */
    @JvmName("lnwiyqjfmwnqkvth")
    public suspend fun project(`value`: Output<String>) {
        this.project = value
    }

    /**
     * @param value Output only. The current version of this workflow template.
     */
    @Deprecated(
        message = """
  version is not useful as a configurable field, and will be removed in the future.
  """,
    )
    @JvmName("mjmfjbuffktvlhry")
    public suspend fun version(`value`: Output<Int>) {
        this.version = value
    }

    /**
     * @param value Optional. Timeout duration for the DAG of jobs, expressed in seconds (see [JSON representation of
     * duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). The timeout duration must be from 10
     * minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at
     * the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running
     * on a [managed
     * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
     * the cluster is deleted.
     */
    @JvmName("bbecemdatgnacnus")
    public suspend fun dagTimeout(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.dagTimeout = mapped
    }

    /**
     * @param value Required. The Directed Acyclic Graph of Jobs to submit.
     */
    @JvmName("ahchyklxuipelnee")
    public suspend fun jobs(`value`: List<WorkflowTemplateJobArgs>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.jobs = mapped
    }

    /**
     * @param argument Required. The Directed Acyclic Graph of Jobs to submit.
     */
    @JvmName("nbqponsyaomcrbek")
    public suspend fun jobs(argument: List<suspend WorkflowTemplateJobArgsBuilder.() -> Unit>) {
        val toBeMapped = argument.toList().map {
            WorkflowTemplateJobArgsBuilder().applySuspend {
                it()
            }.build()
        }
        val mapped = of(toBeMapped)
        this.jobs = mapped
    }

    /**
     * @param argument Required. The Directed Acyclic Graph of Jobs to submit.
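     *
     * A two-step DAG sketch (job-builder member names such as `sparkJob` and
     * `prerequisiteStepIds` are assumed from the generated WorkflowTemplateJobArgsBuilder):
     * ```kotlin
     * jobs(
     *     {
     *         stepId("someJob")
     *         sparkJob { mainClass("SomeClass") }
     *     },
     *     {
     *         stepId("otherJob")
     *         prerequisiteStepIds("someJob")
     *         prestoJob { queryFileUri("someuri") }
     *     },
     * )
     * ```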
     */
    @JvmName("bvutlfbudenomjyf")
    public suspend fun jobs(vararg argument: suspend WorkflowTemplateJobArgsBuilder.() -> Unit) {
        val toBeMapped = argument.toList().map {
            WorkflowTemplateJobArgsBuilder().applySuspend {
                it()
            }.build()
        }
        val mapped = of(toBeMapped)
        this.jobs = mapped
    }

    /**
     * @param argument Required. The Directed Acyclic Graph of Jobs to submit.
     */
    @JvmName("dghcccrxbpsgrdix")
    public suspend fun jobs(argument: suspend WorkflowTemplateJobArgsBuilder.() -> Unit) {
        val toBeMapped = listOf(WorkflowTemplateJobArgsBuilder().applySuspend { argument() }.build())
        val mapped = of(toBeMapped)
        this.jobs = mapped
    }

    /**
     * @param values Required. The Directed Acyclic Graph of Jobs to submit.
     */
    @JvmName("wyypctdxegwhhiyc")
    public suspend fun jobs(vararg values: WorkflowTemplateJobArgs) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.jobs = mapped
    }

    /**
     * @param value Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created
     * by the workflow instance. Label **keys** must contain 1 to 63 characters, and must conform to [RFC
     * 1035](https://www.ietf.org/rfc/rfc1035.txt). Label **values** may be empty, but, if present, must contain 1 to 63
     * characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
     * associated with a template. **Note**: This field is non-authoritative, and will only manage the labels present in your
     * configuration. Please refer to the field `effective_labels` for all of the labels present on the resource.
     */
    @JvmName("ruoymdbsctppqldb")
    public suspend fun labels(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.labels = mapped
    }

    /**
     * @param values Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created
     * by the workflow instance. Label **keys** must contain 1 to 63 characters, and must conform to [RFC
     * 1035](https://www.ietf.org/rfc/rfc1035.txt). Label **values** may be empty, but, if present, must contain 1 to 63
     * characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
     * associated with a template. **Note**: This field is non-authoritative, and will only manage the labels present in your
     * configuration. Please refer to the field `effective_labels` for all of the labels present on the resource.
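     *
     * For example: `labels("env" to "dev", "team" to "data")`.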
     */
    @JvmName("axmjdkbyitysxwfh")
    public fun labels(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.labels = mapped
    }

    /**
     * @param value The location for the resource
     */
    @JvmName("eugedqqxodmhnghc")
    public suspend fun location(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.location = mapped
    }

    /**
     * @param value Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names.
     * * For `projects.regions.workflowTemplates`, the resource name of the template has the following format: `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
     * * For `projects.locations.workflowTemplates`, the resource name of the template has the following format: `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
     */
    @JvmName("eiynwwdrlfffofnn")
    public suspend fun name(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.name = mapped
    }

    /**
     * @param value Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided
     * when the template is instantiated.
     */
    @JvmName("tiaaewqgtkrnmpud")
    public suspend fun parameters(`value`: List<WorkflowTemplateParameterArgs>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.parameters = mapped
    }

    /**
     * @param argument Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided
     * when the template is instantiated.
     */
    @JvmName("oigyxjhrhcerlokx")
    public suspend fun parameters(argument: List<suspend WorkflowTemplateParameterArgsBuilder.() -> Unit>) {
        val toBeMapped = argument.toList().map {
            WorkflowTemplateParameterArgsBuilder().applySuspend {
                it()
            }.build()
        }
        val mapped = of(toBeMapped)
        this.parameters = mapped
    }

    /**
     * @param argument Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided
     * when the template is instantiated.
     */
    @JvmName("uqelaracoynjlwhb")
    public suspend fun parameters(vararg argument: suspend WorkflowTemplateParameterArgsBuilder.() -> Unit) {
        val toBeMapped = argument.toList().map {
            WorkflowTemplateParameterArgsBuilder().applySuspend {
                it()
            }.build()
        }
        val mapped = of(toBeMapped)
        this.parameters = mapped
    }

    /**
     * @param argument Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided
     * when the template is instantiated.
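     *
     * Sketch (parameter-builder member names such as `name` and `fields` are assumed
     * from the generated WorkflowTemplateParameterArgsBuilder):
     * ```kotlin
     * parameters {
     *     name("ZONE")
     *     fields("placement.managedCluster.config.gceClusterConfig.zone")
     * }
     * ```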
     */
    @JvmName("dolalwkcgctovbdw")
    public suspend fun parameters(argument: suspend WorkflowTemplateParameterArgsBuilder.() -> Unit) {
        val toBeMapped = listOf(
            WorkflowTemplateParameterArgsBuilder().applySuspend {
                argument()
            }.build(),
        )
        val mapped = of(toBeMapped)
        this.parameters = mapped
    }

    /**
     * @param values Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided
     * when the template is instantiated.
     */
    @JvmName("nkgwudeuxwijsxxc")
    public suspend fun parameters(vararg values: WorkflowTemplateParameterArgs) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.parameters = mapped
    }

    /**
     * @param value Required. WorkflowTemplate scheduling information.
     */
    @JvmName("fufndwyfktfpsumu")
    public suspend fun placement(`value`: WorkflowTemplatePlacementArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.placement = mapped
    }

    /**
     * @param argument Required. WorkflowTemplate scheduling information.
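     *
     * Sketch (nested builder names assumed from the same codegen pattern):
     * ```kotlin
     * placement {
     *     managedCluster {
     *         clusterName("my-cluster")
     *     }
     * }
     * ```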
     */
    @JvmName("cqmlrrrmqmxlabbg")
    public suspend fun placement(argument: suspend WorkflowTemplatePlacementArgsBuilder.() -> Unit) {
        val toBeMapped = WorkflowTemplatePlacementArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.placement = mapped
    }

    /**
     * @param value The project for the resource
     */
    @JvmName("qvjllikiqeifcllb")
    public suspend fun project(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.project = mapped
    }

    /**
     * @param value Output only. The current version of this workflow template.
     */
    @Deprecated(
        message = """
  version is not useful as a configurable field, and will be removed in the future.
  """,
    )
    @JvmName("hovstdkmneipxyys")
    public suspend fun version(`value`: Int?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.version = mapped
    }

    internal fun build(): WorkflowTemplateArgs = WorkflowTemplateArgs(
        dagTimeout = dagTimeout,
        jobs = jobs,
        labels = labels,
        location = location,
        name = name,
        parameters = parameters,
        placement = placement,
        project = project,
        version = version,
    )
}
