@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.gcp.dataproc.kotlin
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.dataproc.WorkflowTemplateArgs.builder
import com.pulumi.gcp.dataproc.kotlin.inputs.WorkflowTemplateJobArgs
import com.pulumi.gcp.dataproc.kotlin.inputs.WorkflowTemplateJobArgsBuilder
import com.pulumi.gcp.dataproc.kotlin.inputs.WorkflowTemplateParameterArgs
import com.pulumi.gcp.dataproc.kotlin.inputs.WorkflowTemplateParameterArgsBuilder
import com.pulumi.gcp.dataproc.kotlin.inputs.WorkflowTemplatePlacementArgs
import com.pulumi.gcp.dataproc.kotlin.inputs.WorkflowTemplatePlacementArgsBuilder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Deprecated
import kotlin.Int
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName
/**
* A Workflow Template is a reusable workflow configuration. It defines a graph of jobs with information on where to run those jobs.
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
* const template = new gcp.dataproc.WorkflowTemplate("template", {
* name: "template-example",
* location: "us-central1",
* placement: {
* managedCluster: {
* clusterName: "my-cluster",
* config: {
* gceClusterConfig: {
* zone: "us-central1-a",
* tags: [
* "foo",
* "bar",
* ],
* },
* masterConfig: {
* numInstances: 1,
* machineType: "n1-standard-1",
* diskConfig: {
* bootDiskType: "pd-ssd",
* bootDiskSizeGb: 15,
* },
* },
* workerConfig: {
* numInstances: 3,
* machineType: "n1-standard-2",
* diskConfig: {
* bootDiskSizeGb: 10,
* numLocalSsds: 2,
* },
* },
* secondaryWorkerConfig: {
* numInstances: 2,
* },
* softwareConfig: {
* imageVersion: "2.0.35-debian10",
* },
* },
* },
* },
* jobs: [
* {
* stepId: "someJob",
* sparkJob: {
* mainClass: "SomeClass",
* },
* },
* {
* stepId: "otherJob",
* prerequisiteStepIds: ["someJob"],
* prestoJob: {
* queryFileUri: "someuri",
* },
* },
* ],
* });
* ```
* ```python
* import pulumi
* import pulumi_gcp as gcp
* template = gcp.dataproc.WorkflowTemplate("template",
* name="template-example",
* location="us-central1",
* placement=gcp.dataproc.WorkflowTemplatePlacementArgs(
* managed_cluster=gcp.dataproc.WorkflowTemplatePlacementManagedClusterArgs(
* cluster_name="my-cluster",
* config=gcp.dataproc.WorkflowTemplatePlacementManagedClusterConfigArgs(
* gce_cluster_config=gcp.dataproc.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs(
* zone="us-central1-a",
* tags=[
* "foo",
* "bar",
* ],
* ),
* master_config=gcp.dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs(
* num_instances=1,
* machine_type="n1-standard-1",
* disk_config=gcp.dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs(
* boot_disk_type="pd-ssd",
* boot_disk_size_gb=15,
* ),
* ),
* worker_config=gcp.dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs(
* num_instances=3,
* machine_type="n1-standard-2",
* disk_config=gcp.dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs(
* boot_disk_size_gb=10,
* num_local_ssds=2,
* ),
* ),
* secondary_worker_config=gcp.dataproc.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs(
* num_instances=2,
* ),
* software_config=gcp.dataproc.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs(
* image_version="2.0.35-debian10",
* ),
* ),
* ),
* ),
* jobs=[
* gcp.dataproc.WorkflowTemplateJobArgs(
* step_id="someJob",
* spark_job=gcp.dataproc.WorkflowTemplateJobSparkJobArgs(
* main_class="SomeClass",
* ),
* ),
* gcp.dataproc.WorkflowTemplateJobArgs(
* step_id="otherJob",
* prerequisite_step_ids=["someJob"],
* presto_job=gcp.dataproc.WorkflowTemplateJobPrestoJobArgs(
* query_file_uri="someuri",
* ),
* ),
* ])
* ```
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using Gcp = Pulumi.Gcp;
* return await Deployment.RunAsync(() =>
* {
* var template = new Gcp.Dataproc.WorkflowTemplate("template", new()
* {
* Name = "template-example",
* Location = "us-central1",
* Placement = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementArgs
* {
* ManagedCluster = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterArgs
* {
* ClusterName = "my-cluster",
* Config = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigArgs
* {
* GceClusterConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs
* {
* Zone = "us-central1-a",
* Tags = new[]
* {
* "foo",
* "bar",
* },
* },
* MasterConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs
* {
* NumInstances = 1,
* MachineType = "n1-standard-1",
* DiskConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs
* {
* BootDiskType = "pd-ssd",
* BootDiskSizeGb = 15,
* },
* },
* WorkerConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs
* {
* NumInstances = 3,
* MachineType = "n1-standard-2",
* DiskConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs
* {
* BootDiskSizeGb = 10,
* NumLocalSsds = 2,
* },
* },
* SecondaryWorkerConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs
* {
* NumInstances = 2,
* },
* SoftwareConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs
* {
* ImageVersion = "2.0.35-debian10",
* },
* },
* },
* },
* Jobs = new[]
* {
* new Gcp.Dataproc.Inputs.WorkflowTemplateJobArgs
* {
* StepId = "someJob",
* SparkJob = new Gcp.Dataproc.Inputs.WorkflowTemplateJobSparkJobArgs
* {
* MainClass = "SomeClass",
* },
* },
* new Gcp.Dataproc.Inputs.WorkflowTemplateJobArgs
* {
* StepId = "otherJob",
* PrerequisiteStepIds = new[]
* {
* "someJob",
* },
* PrestoJob = new Gcp.Dataproc.Inputs.WorkflowTemplateJobPrestoJobArgs
* {
* QueryFileUri = "someuri",
* },
* },
* },
* });
* });
* ```
* ```go
* package main
* import (
* "github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataproc"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* _, err := dataproc.NewWorkflowTemplate(ctx, "template", &dataproc.WorkflowTemplateArgs{
* Name: pulumi.String("template-example"),
* Location: pulumi.String("us-central1"),
* Placement: &dataproc.WorkflowTemplatePlacementArgs{
* ManagedCluster: &dataproc.WorkflowTemplatePlacementManagedClusterArgs{
* ClusterName: pulumi.String("my-cluster"),
* Config: &dataproc.WorkflowTemplatePlacementManagedClusterConfigArgs{
* GceClusterConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs{
* Zone: pulumi.String("us-central1-a"),
* Tags: pulumi.StringArray{
* pulumi.String("foo"),
* pulumi.String("bar"),
* },
* },
* MasterConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs{
* NumInstances: pulumi.Int(1),
* MachineType: pulumi.String("n1-standard-1"),
* DiskConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs{
* BootDiskType: pulumi.String("pd-ssd"),
* BootDiskSizeGb: pulumi.Int(15),
* },
* },
* WorkerConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs{
* NumInstances: pulumi.Int(3),
* MachineType: pulumi.String("n1-standard-2"),
* DiskConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs{
* BootDiskSizeGb: pulumi.Int(10),
* NumLocalSsds: pulumi.Int(2),
* },
* },
* SecondaryWorkerConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs{
* NumInstances: pulumi.Int(2),
* },
* SoftwareConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs{
* ImageVersion: pulumi.String("2.0.35-debian10"),
* },
* },
* },
* },
* Jobs: dataproc.WorkflowTemplateJobArray{
* &dataproc.WorkflowTemplateJobArgs{
* StepId: pulumi.String("someJob"),
* SparkJob: &dataproc.WorkflowTemplateJobSparkJobArgs{
* MainClass: pulumi.String("SomeClass"),
* },
* },
* &dataproc.WorkflowTemplateJobArgs{
* StepId: pulumi.String("otherJob"),
* PrerequisiteStepIds: pulumi.StringArray{
* pulumi.String("someJob"),
* },
* PrestoJob: &dataproc.WorkflowTemplateJobPrestoJobArgs{
* QueryFileUri: pulumi.String("someuri"),
* },
* },
* },
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.gcp.dataproc.WorkflowTemplate;
* import com.pulumi.gcp.dataproc.WorkflowTemplateArgs;
* import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementArgs;
* import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterArgs;
* import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigArgs;
* import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs;
* import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs;
* import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs;
* import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs;
* import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs;
* import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs;
* import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs;
* import com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobArgs;
* import com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobSparkJobArgs;
* import com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobPrestoJobArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var template = new WorkflowTemplate("template", WorkflowTemplateArgs.builder()
* .name("template-example")
* .location("us-central1")
* .placement(WorkflowTemplatePlacementArgs.builder()
* .managedCluster(WorkflowTemplatePlacementManagedClusterArgs.builder()
* .clusterName("my-cluster")
* .config(WorkflowTemplatePlacementManagedClusterConfigArgs.builder()
* .gceClusterConfig(WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs.builder()
* .zone("us-central1-a")
* .tags(
* "foo",
* "bar")
* .build())
* .masterConfig(WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs.builder()
* .numInstances(1)
* .machineType("n1-standard-1")
* .diskConfig(WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs.builder()
* .bootDiskType("pd-ssd")
* .bootDiskSizeGb(15)
* .build())
* .build())
* .workerConfig(WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs.builder()
* .numInstances(3)
* .machineType("n1-standard-2")
* .diskConfig(WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs.builder()
* .bootDiskSizeGb(10)
* .numLocalSsds(2)
* .build())
* .build())
* .secondaryWorkerConfig(WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs.builder()
* .numInstances(2)
* .build())
* .softwareConfig(WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs.builder()
* .imageVersion("2.0.35-debian10")
* .build())
* .build())
* .build())
* .build())
* .jobs(
* WorkflowTemplateJobArgs.builder()
* .stepId("someJob")
* .sparkJob(WorkflowTemplateJobSparkJobArgs.builder()
* .mainClass("SomeClass")
* .build())
* .build(),
* WorkflowTemplateJobArgs.builder()
* .stepId("otherJob")
* .prerequisiteStepIds("someJob")
* .prestoJob(WorkflowTemplateJobPrestoJobArgs.builder()
* .queryFileUri("someuri")
* .build())
* .build())
* .build());
* }
* }
* ```
* ```yaml
* resources:
*   template:
*     type: gcp:dataproc:WorkflowTemplate
*     properties:
*       name: template-example
*       location: us-central1
*       placement:
*         managedCluster:
*           clusterName: my-cluster
*           config:
*             gceClusterConfig:
*               zone: us-central1-a
*               tags:
*                 - foo
*                 - bar
*             masterConfig:
*               numInstances: 1
*               machineType: n1-standard-1
*               diskConfig:
*                 bootDiskType: pd-ssd
*                 bootDiskSizeGb: 15
*             workerConfig:
*               numInstances: 3
*               machineType: n1-standard-2
*               diskConfig:
*                 bootDiskSizeGb: 10
*                 numLocalSsds: 2
*             secondaryWorkerConfig:
*               numInstances: 2
*             softwareConfig:
*               imageVersion: 2.0.35-debian10
*       jobs:
*         - stepId: someJob
*           sparkJob:
*             mainClass: SomeClass
*         - stepId: otherJob
*           prerequisiteStepIds:
*             - someJob
*           prestoJob:
*             queryFileUri: someuri
* ```
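* Since this file is part of the Kotlin SDK, the same template can also be declared with the Kotlin
* type-safe builders. The sketch below is illustrative rather than authoritative: it assumes the
* SDK's usual `Pulumi.run { }` entry point and a lowercase `workflowTemplate` resource builder
* function (names may differ between SDK versions), and it omits most of the managed-cluster
* configuration for brevity.
* ```kotlin
* import com.pulumi.gcp.dataproc.kotlin.workflowTemplate
* import com.pulumi.kotlin.Pulumi
*
* fun main() {
*     Pulumi.run {
*         // Assumed resource builder entry point for gcp.dataproc.WorkflowTemplate.
*         workflowTemplate("template") {
*             args {
*                 name("template-example")
*                 location("us-central1")
*                 placement {
*                     managedCluster {
*                         clusterName("my-cluster")
*                         // config { gceClusterConfig { ... }; masterConfig { ... }; ... }
*                     }
*                 }
*                 jobs(
*                     {
*                         stepId("someJob")
*                         sparkJob { mainClass("SomeClass") }
*                     },
*                     {
*                         stepId("otherJob")
*                         prerequisiteStepIds("someJob")
*                         prestoJob { queryFileUri("someuri") }
*                     },
*                 )
*             }
*         }
*     }
* }
* ```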
*
* ## Import
* WorkflowTemplate can be imported using any of these accepted formats:
* * `projects/{{project}}/locations/{{location}}/workflowTemplates/{{name}}`
* * `{{project}}/{{location}}/{{name}}`
* * `{{location}}/{{name}}`
* When using the `pulumi import` command, WorkflowTemplate can be imported using one of the formats above. For example:
* ```sh
* $ pulumi import gcp:dataproc/workflowTemplate:WorkflowTemplate default projects/{{project}}/locations/{{location}}/workflowTemplates/{{name}}
* ```
* ```sh
* $ pulumi import gcp:dataproc/workflowTemplate:WorkflowTemplate default {{project}}/{{location}}/{{name}}
* ```
* ```sh
* $ pulumi import gcp:dataproc/workflowTemplate:WorkflowTemplate default {{location}}/{{name}}
* ```
* @property dagTimeout Optional. Timeout duration for the DAG of jobs, expressed in seconds (see [JSON representation of
* duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). The timeout duration must be from 10
* minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is still running
* at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running
* on a [managed
* cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
* the cluster is deleted. (A short Kotlin sketch that sets this field appears after the property list below.)
* @property jobs Required. The Directed Acyclic Graph of Jobs to submit.
* @property labels Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created
* by the workflow instance. Label **keys** must contain 1 to 63 characters, and must conform to [RFC
* 1035](https://www.ietf.org/rfc/rfc1035.txt). Label **values** may be empty, but, if present, must contain 1 to 63
* characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
* associated with a template. **Note**: This field is non-authoritative, and will only manage the labels present in your
* configuration. Please refer to the field `effective_labels` for all of the labels present on the resource.
* @property location The location for the resource
* @property name Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names.
* * For `projects.regions.workflowTemplates`, the resource name of the template has the following format: `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
* * For `projects.locations.workflowTemplates`, the resource name of the template has the following format: `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
* @property parameters Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided
* when the template is instantiated.
* @property placement Required. WorkflowTemplate scheduling information.
* @property project The project for the resource
* @property version Output only. The current version of this workflow template.
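*
* As a minimal illustration (not part of the upstream example set), the scalar fields above can be
* set directly on this args class via `Output.of`; nested values such as `placement` and `jobs`
* take the input types from `com.pulumi.gcp.dataproc.kotlin.inputs`. The literal values here are
* placeholders:
* ```kotlin
* val args = WorkflowTemplateArgs(
*     dagTimeout = Output.of("1800s"),            // must be between "600s" and "86400s"
*     labels = Output.of(mapOf("env" to "dev")),  // propagated to jobs and clusters
*     location = Output.of("us-central1"),
* )
* ```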
*/
public data class WorkflowTemplateArgs(
    public val dagTimeout: Output<String>? = null,
    public val jobs: Output<List<WorkflowTemplateJobArgs>>? = null,
    public val labels: Output<Map<String, String>>? = null,
    public val location: Output<String>? = null,
    public val name: Output<String>? = null,
    public val parameters: Output<List<WorkflowTemplateParameterArgs>>? = null,
    public val placement: Output<WorkflowTemplatePlacementArgs>? = null,
    public val project: Output<String>? = null,
    public val version: Output<Int>? = null,
) : ConvertibleToJava<com.pulumi.gcp.dataproc.WorkflowTemplateArgs> {
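    // The remainder of the generated class was truncated in this capture. Below is a hedged sketch
    // of the ConvertibleToJava bridge that these args classes provide; the real generated mapping
    // for the nested inputs (jobs, parameters, placement) is more involved and is only noted in
    // comments here.
    override fun toJava(): com.pulumi.gcp.dataproc.WorkflowTemplateArgs =
        com.pulumi.gcp.dataproc.WorkflowTemplateArgs.builder()
            .dagTimeout(dagTimeout?.applyValue { it })
            .labels(labels?.applyValue { it })
            .location(location?.applyValue { it })
            .name(name?.applyValue { it })
            .project(project?.applyValue { it })
            .version(version?.applyValue { it })
            // .jobs(...), .parameters(...), and .placement(...) delegate to the nested args'
            // own toJava() conversions in the generated code.
            .build()
}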