// com.pulumi.gcp.dataflow.kotlin.Job.kt
@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.gcp.dataflow.kotlin
import com.pulumi.core.Output
import com.pulumi.kotlin.KotlinCustomResource
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.ResourceMapper
import com.pulumi.kotlin.options.CustomResourceOptions
import com.pulumi.kotlin.options.CustomResourceOptionsBuilder
import com.pulumi.resources.Resource
import kotlin.Any
import kotlin.Boolean
import kotlin.Int
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.collections.Map
/**
* Builder for [Job].
*/
@PulumiTagMarker
public class JobResourceBuilder internal constructor() {
    public var name: String? = null

    public var args: JobArgs = JobArgs()

    public var opts: CustomResourceOptions = CustomResourceOptions()

    /**
     * @param name The _unique_ name of the resulting resource.
     */
    public fun name(`value`: String) {
        this.name = value
    }

    /**
     * @param block The arguments to use to populate this resource's properties.
     */
    public suspend fun args(block: suspend JobArgsBuilder.() -> Unit) {
        val builder = JobArgsBuilder()
        block(builder)
        this.args = builder.build()
    }

    /**
     * @param block A bag of options that control this resource's behavior.
     */
    public suspend fun opts(block: suspend CustomResourceOptionsBuilder.() -> Unit) {
        this.opts = com.pulumi.kotlin.options.CustomResourceOptions.opts(block)
    }

    internal fun build(): Job {
        val builtJavaResource = com.pulumi.gcp.dataflow.Job(
            this.name,
            this.args.toJava(),
            this.opts.toJava(),
        )
        return Job(builtJavaResource)
    }
}
/**
* Creates a job on Dataflow, which is an implementation of Apache Beam running on Google Compute Engine. For more information see
* the official documentation for
* [Beam](https://beam.apache.org) and [Dataflow](https://cloud.google.com/dataflow/).
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
* const bigDataJob = new gcp.dataflow.Job("big_data_job", {
* name: "dataflow-job",
* templateGcsPath: "gs://my-bucket/templates/template_file",
* tempGcsLocation: "gs://my-bucket/tmp_dir",
* parameters: {
* foo: "bar",
* baz: "qux",
* },
* });
* ```
* ```python
* import pulumi
* import pulumi_gcp as gcp
* big_data_job = gcp.dataflow.Job("big_data_job",
* name="dataflow-job",
* template_gcs_path="gs://my-bucket/templates/template_file",
* temp_gcs_location="gs://my-bucket/tmp_dir",
* parameters={
* "foo": "bar",
* "baz": "qux",
* })
* ```
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using Gcp = Pulumi.Gcp;
* return await Deployment.RunAsync(() =>
* {
* var bigDataJob = new Gcp.Dataflow.Job("big_data_job", new()
* {
* Name = "dataflow-job",
* TemplateGcsPath = "gs://my-bucket/templates/template_file",
* TempGcsLocation = "gs://my-bucket/tmp_dir",
* Parameters =
* {
* { "foo", "bar" },
* { "baz", "qux" },
* },
* });
* });
* ```
* ```go
* package main
* import (
* "github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataflow"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* _, err := dataflow.NewJob(ctx, "big_data_job", &dataflow.JobArgs{
* Name: pulumi.String("dataflow-job"),
* TemplateGcsPath: pulumi.String("gs://my-bucket/templates/template_file"),
* TempGcsLocation: pulumi.String("gs://my-bucket/tmp_dir"),
* Parameters: pulumi.Map{
* "foo": pulumi.Any("bar"),
* "baz": pulumi.Any("qux"),
* },
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.gcp.dataflow.Job;
* import com.pulumi.gcp.dataflow.JobArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var bigDataJob = new Job("bigDataJob", JobArgs.builder()
* .name("dataflow-job")
* .templateGcsPath("gs://my-bucket/templates/template_file")
* .tempGcsLocation("gs://my-bucket/tmp_dir")
* .parameters(Map.ofEntries(
* Map.entry("foo", "bar"),
* Map.entry("baz", "qux")
* ))
* .build());
* }
* }
* ```
* ```yaml
* resources:
* bigDataJob:
* type: gcp:dataflow:Job
* name: big_data_job
* properties:
* name: dataflow-job
* templateGcsPath: gs://my-bucket/templates/template_file
* tempGcsLocation: gs://my-bucket/tmp_dir
* parameters:
* foo: bar
* baz: qux
* ```
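*
* Since this is the Kotlin SDK, the same program might look like the sketch below. The top-level `job` builder function and the `Pulumi.run` entry point follow the usual pulumi-kotlin conventions, but treat them (and the `Map` overload of `parameters`) as assumptions rather than a verified API for this exact version:
* ```kotlin
* import com.pulumi.gcp.dataflow.kotlin.job
* import com.pulumi.kotlin.Pulumi
*
* fun main() {
*     Pulumi.run {
*         // Launch a classic templated Dataflow job, mirroring the examples above.
*         // `job` and the Map overload of `parameters` are assumed generated helpers.
*         val bigDataJob = job("big_data_job") {
*             args {
*                 name("dataflow-job")
*                 templateGcsPath("gs://my-bucket/templates/template_file")
*                 tempGcsLocation("gs://my-bucket/tmp_dir")
*                 parameters(mapOf("foo" to "bar", "baz" to "qux"))
*             }
*         }
*     }
* }
* ```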
*
* ### Streaming Job
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
* const topic = new gcp.pubsub.Topic("topic", {name: "dataflow-job1"});
* const bucket1 = new gcp.storage.Bucket("bucket1", {
* name: "tf-test-bucket1",
* location: "US",
* forceDestroy: true,
* });
* const bucket2 = new gcp.storage.Bucket("bucket2", {
* name: "tf-test-bucket2",
* location: "US",
* forceDestroy: true,
* });
* const pubsubStream = new gcp.dataflow.Job("pubsub_stream", {
* name: "tf-test-dataflow-job1",
* templateGcsPath: "gs://my-bucket/templates/template_file",
* tempGcsLocation: "gs://my-bucket/tmp_dir",
* enableStreamingEngine: true,
* parameters: {
* inputFilePattern: pulumi.interpolate`${bucket1.url}/*.json`,
* outputTopic: topic.id,
* },
* transformNameMapping: {
* name: "test_job",
* env: "test",
* },
* onDelete: "cancel",
* });
* ```
* ```python
* import pulumi
* import pulumi_gcp as gcp
* topic = gcp.pubsub.Topic("topic", name="dataflow-job1")
* bucket1 = gcp.storage.Bucket("bucket1",
* name="tf-test-bucket1",
* location="US",
* force_destroy=True)
* bucket2 = gcp.storage.Bucket("bucket2",
* name="tf-test-bucket2",
* location="US",
* force_destroy=True)
* pubsub_stream = gcp.dataflow.Job("pubsub_stream",
* name="tf-test-dataflow-job1",
* template_gcs_path="gs://my-bucket/templates/template_file",
* temp_gcs_location="gs://my-bucket/tmp_dir",
* enable_streaming_engine=True,
* parameters={
* "inputFilePattern": bucket1.url.apply(lambda url: f"{url}/*.json"),
* "outputTopic": topic.id,
* },
* transform_name_mapping={
* "name": "test_job",
* "env": "test",
* },
* on_delete="cancel")
* ```
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using Gcp = Pulumi.Gcp;
* return await Deployment.RunAsync(() =>
* {
* var topic = new Gcp.PubSub.Topic("topic", new()
* {
* Name = "dataflow-job1",
* });
* var bucket1 = new Gcp.Storage.Bucket("bucket1", new()
* {
* Name = "tf-test-bucket1",
* Location = "US",
* ForceDestroy = true,
* });
* var bucket2 = new Gcp.Storage.Bucket("bucket2", new()
* {
* Name = "tf-test-bucket2",
* Location = "US",
* ForceDestroy = true,
* });
* var pubsubStream = new Gcp.Dataflow.Job("pubsub_stream", new()
* {
* Name = "tf-test-dataflow-job1",
* TemplateGcsPath = "gs://my-bucket/templates/template_file",
* TempGcsLocation = "gs://my-bucket/tmp_dir",
* EnableStreamingEngine = true,
* Parameters =
* {
* { "inputFilePattern", bucket1.Url.Apply(url => $"{url}/*.json") },
* { "outputTopic", topic.Id },
* },
* TransformNameMapping =
* {
* { "name", "test_job" },
* { "env", "test" },
* },
* OnDelete = "cancel",
* });
* });
* ```
* ```go
* package main
* import (
* "fmt"
* "github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataflow"
* "github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/pubsub"
* "github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/storage"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* topic, err := pubsub.NewTopic(ctx, "topic", &pubsub.TopicArgs{
* Name: pulumi.String("dataflow-job1"),
* })
* if err != nil {
* return err
* }
* bucket1, err := storage.NewBucket(ctx, "bucket1", &storage.BucketArgs{
* Name: pulumi.String("tf-test-bucket1"),
* Location: pulumi.String("US"),
* ForceDestroy: pulumi.Bool(true),
* })
* if err != nil {
* return err
* }
* _, err = storage.NewBucket(ctx, "bucket2", &storage.BucketArgs{
* Name: pulumi.String("tf-test-bucket2"),
* Location: pulumi.String("US"),
* ForceDestroy: pulumi.Bool(true),
* })
* if err != nil {
* return err
* }
* _, err = dataflow.NewJob(ctx, "pubsub_stream", &dataflow.JobArgs{
* Name: pulumi.String("tf-test-dataflow-job1"),
* TemplateGcsPath: pulumi.String("gs://my-bucket/templates/template_file"),
* TempGcsLocation: pulumi.String("gs://my-bucket/tmp_dir"),
* EnableStreamingEngine: pulumi.Bool(true),
* Parameters: pulumi.Map{
* "inputFilePattern": bucket1.Url.ApplyT(func(url string) (string, error) {
* return fmt.Sprintf("%v/*.json", url), nil
* }).(pulumi.StringOutput),
* "outputTopic": topic.ID(),
* },
* TransformNameMapping: pulumi.Map{
* "name": pulumi.Any("test_job"),
* "env": pulumi.Any("test"),
* },
* OnDelete: pulumi.String("cancel"),
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.gcp.pubsub.Topic;
* import com.pulumi.gcp.pubsub.TopicArgs;
* import com.pulumi.gcp.storage.Bucket;
* import com.pulumi.gcp.storage.BucketArgs;
* import com.pulumi.gcp.dataflow.Job;
* import com.pulumi.gcp.dataflow.JobArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* var topic = new Topic("topic", TopicArgs.builder()
* .name("dataflow-job1")
* .build());
* var bucket1 = new Bucket("bucket1", BucketArgs.builder()
* .name("tf-test-bucket1")
* .location("US")
* .forceDestroy(true)
* .build());
* var bucket2 = new Bucket("bucket2", BucketArgs.builder()
* .name("tf-test-bucket2")
* .location("US")
* .forceDestroy(true)
* .build());
* var pubsubStream = new Job("pubsubStream", JobArgs.builder()
* .name("tf-test-dataflow-job1")
* .templateGcsPath("gs://my-bucket/templates/template_file")
* .tempGcsLocation("gs://my-bucket/tmp_dir")
* .enableStreamingEngine(true)
* .parameters(Map.ofEntries(
* Map.entry("inputFilePattern", bucket1.url().applyValue(url -> String.format("%s/*.json", url))),
* Map.entry("outputTopic", topic.id())
* ))
* .transformNameMapping(Map.ofEntries(
* Map.entry("name", "test_job"),
* Map.entry("env", "test")
* ))
* .onDelete("cancel")
* .build());
* }
* }
* ```
* ```yaml
* resources:
* topic:
* type: gcp:pubsub:Topic
* properties:
* name: dataflow-job1
* bucket1:
* type: gcp:storage:Bucket
* properties:
* name: tf-test-bucket1
* location: US
* forceDestroy: true
* bucket2:
* type: gcp:storage:Bucket
* properties:
* name: tf-test-bucket2
* location: US
* forceDestroy: true
* pubsubStream:
* type: gcp:dataflow:Job
* name: pubsub_stream
* properties:
* name: tf-test-dataflow-job1
* templateGcsPath: gs://my-bucket/templates/template_file
* tempGcsLocation: gs://my-bucket/tmp_dir
* enableStreamingEngine: true
* parameters:
* inputFilePattern: ${bucket1.url}/*.json
* outputTopic: ${topic.id}
* transformNameMapping:
* name: test_job
* env: test
* onDelete: cancel
* ```
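*
* A Kotlin sketch of the same streaming job, under the same assumptions as the Kotlin example above (the top-level `topic`, `bucket`, and `job` builder functions, the vararg `Pair` overloads, and the `id`/`url` output accessors are all assumed; the unused `bucket2` is omitted):
* ```kotlin
* import com.pulumi.gcp.dataflow.kotlin.job
* import com.pulumi.gcp.pubsub.kotlin.topic
* import com.pulumi.gcp.storage.kotlin.bucket
* import com.pulumi.kotlin.Pulumi
*
* fun main() {
*     Pulumi.run {
*         val topic = topic("topic") {
*             args { name("dataflow-job1") }
*         }
*         val bucket1 = bucket("bucket1") {
*             args {
*                 name("tf-test-bucket1")
*                 location("US")
*                 forceDestroy(true)
*             }
*         }
*         job("pubsub_stream") {
*             args {
*                 name("tf-test-dataflow-job1")
*                 templateGcsPath("gs://my-bucket/templates/template_file")
*                 tempGcsLocation("gs://my-bucket/tmp_dir")
*                 enableStreamingEngine(true)
*                 // Outputs compose with applyValue; the glob is concatenated in
*                 // two pieces only so this example stays valid inside a KDoc comment.
*                 parameters(
*                     "inputFilePattern" to bucket1.url.applyValue { url -> url + "/" + "*.json" },
*                     "outputTopic" to topic.id,
*                 )
*                 transformNameMapping("name" to "test_job", "env" to "test")
*                 onDelete("cancel")
*             }
*         }
*     }
* }
* ```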
*
* ## Note on "destroy" / "apply"
* There are many types of Dataflow jobs. Some run continuously, reading new data from a source (e.g. a GCS bucket) and emitting output as they go; others process a fixed amount of data and then terminate. Any job can fail while running due to programming errors or other issues. In this way, Dataflow jobs are different from most other Google resources.
* The Dataflow resource is considered 'existing' while it is in a nonterminal state. If it reaches a terminal state (e.g. 'FAILED', 'COMPLETE', 'CANCELLED'), it will be recreated on the next 'apply'. This is as expected for jobs that run continuously, but may surprise users who use this resource for other kinds of Dataflow jobs.
* A Dataflow job which is 'destroyed' may be "cancelled" or "drained". If "cancelled", the job terminates immediately: any data already written remains where it is, but no new data will be processed. If "drained", no new data enters the pipeline, but data currently in the pipeline finishes processing before the job stops. The default is "drain". When `on_delete` is set to `"drain"` in the configuration, you may experience a long wait for your `pulumi destroy` to complete.
* You can potentially short-circuit the wait by setting `skip_wait_on_job_termination` to `true`, but beware: unless you take active steps to ensure that the job `name` changes between instances, the names will conflict and the new job will fail to launch. One way to ensure this is with a `random_id` resource, for example:
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
* import * as random from "@pulumi/random";
* const config = new pulumi.Config();
* const bigDataJobSubscriptionId = config.get("bigDataJobSubscriptionId") || "projects/myproject/subscriptions/messages";
* const bigDataJobNameSuffix = new random.RandomId("big_data_job_name_suffix", {
* byteLength: 4,
* keepers: {
* region: region,
* subscription_id: bigDataJobSubscriptionId,
* },
* });
* const bigDataJob = new gcp.dataflow.FlexTemplateJob("big_data_job", {
* name: pulumi.interpolate`dataflow-flextemplates-job-${bigDataJobNameSuffix.dec}`,
* region: region,
* containerSpecGcsPath: "gs://my-bucket/templates/template.json",
* skipWaitOnJobTermination: true,
* parameters: {
* inputSubscription: bigDataJobSubscriptionId,
* },
* });
* ```
* ```python
* import pulumi
* import pulumi_gcp as gcp
* import pulumi_random as random
* config = pulumi.Config()
* big_data_job_subscription_id = config.get("bigDataJobSubscriptionId")
* if big_data_job_subscription_id is None:
* big_data_job_subscription_id = "projects/myproject/subscriptions/messages"
* big_data_job_name_suffix = random.RandomId("big_data_job_name_suffix",
* byte_length=4,
* keepers={
* "region": region,
* "subscription_id": big_data_job_subscription_id,
* })
* big_data_job = gcp.dataflow.FlexTemplateJob("big_data_job",
* name=big_data_job_name_suffix.dec.apply(lambda dec: f"dataflow-flextemplates-job-{dec}"),
* region=region,
* container_spec_gcs_path="gs://my-bucket/templates/template.json",
* skip_wait_on_job_termination=True,
* parameters={
* "inputSubscription": big_data_job_subscription_id,
* })
* ```
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using Gcp = Pulumi.Gcp;
* using Random = Pulumi.Random;
* return await Deployment.RunAsync(() =>
* {
* var config = new Config();
* var bigDataJobSubscriptionId = config.Get("bigDataJobSubscriptionId") ?? "projects/myproject/subscriptions/messages";
* var bigDataJobNameSuffix = new Random.RandomId("big_data_job_name_suffix", new()
* {
* ByteLength = 4,
* Keepers =
* {
* { "region", region },
* { "subscription_id", bigDataJobSubscriptionId },
* },
* });
* var bigDataJob = new Gcp.Dataflow.FlexTemplateJob("big_data_job", new()
* {
* Name = bigDataJobNameSuffix.Dec.Apply(dec => $"dataflow-flextemplates-job-{dec}"),
* Region = region,
* ContainerSpecGcsPath = "gs://my-bucket/templates/template.json",
* SkipWaitOnJobTermination = true,
* Parameters =
* {
* { "inputSubscription", bigDataJobSubscriptionId },
* },
* });
* });
* ```
* ```go
* package main
* import (
* "fmt"
* "github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataflow"
* "github.com/pulumi/pulumi-random/sdk/v4/go/random"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi/config"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* cfg := config.New(ctx, "")
* bigDataJobSubscriptionId := "projects/myproject/subscriptions/messages"
* if param := cfg.Get("bigDataJobSubscriptionId"); param != "" {
* bigDataJobSubscriptionId = param
* }
* bigDataJobNameSuffix, err := random.NewRandomId(ctx, "big_data_job_name_suffix", &random.RandomIdArgs{
* ByteLength: pulumi.Int(4),
* Keepers: pulumi.StringMap{
* "region": pulumi.Any(region),
* "subscription_id": pulumi.String(bigDataJobSubscriptionId),
* },
* })
* if err != nil {
* return err
* }
* _, err = dataflow.NewFlexTemplateJob(ctx, "big_data_job", &dataflow.FlexTemplateJobArgs{
* Name: bigDataJobNameSuffix.Dec.ApplyT(func(dec string) (string, error) {
* return fmt.Sprintf("dataflow-flextemplates-job-%v", dec), nil
* }).(pulumi.StringOutput),
* Region: pulumi.Any(region),
* ContainerSpecGcsPath: pulumi.String("gs://my-bucket/templates/template.json"),
* SkipWaitOnJobTermination: pulumi.Bool(true),
* Parameters: pulumi.Map{
* "inputSubscription": pulumi.String(bigDataJobSubscriptionId),
* },
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.random.RandomId;
* import com.pulumi.random.RandomIdArgs;
* import com.pulumi.gcp.dataflow.FlexTemplateJob;
* import com.pulumi.gcp.dataflow.FlexTemplateJobArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* final var config = ctx.config();
* final var bigDataJobSubscriptionId = config.get("bigDataJobSubscriptionId").orElse("projects/myproject/subscriptions/messages");
* var bigDataJobNameSuffix = new RandomId("bigDataJobNameSuffix", RandomIdArgs.builder()
* .byteLength(4)
* .keepers(Map.ofEntries(
* Map.entry("region", region),
* Map.entry("subscription_id", bigDataJobSubscriptionId)
* ))
* .build());
* var bigDataJob = new FlexTemplateJob("bigDataJob", FlexTemplateJobArgs.builder()
* .name(bigDataJobNameSuffix.dec().applyValue(dec -> String.format("dataflow-flextemplates-job-%s", dec)))
* .region(region)
* .containerSpecGcsPath("gs://my-bucket/templates/template.json")
* .skipWaitOnJobTermination(true)
* .parameters(Map.of("inputSubscription", bigDataJobSubscriptionId))
* .build());
* }
* }
* ```
* ```yaml
* configuration:
* bigDataJobSubscriptionId:
* type: string
* default: projects/myproject/subscriptions/messages
* resources:
* bigDataJobNameSuffix:
* type: random:RandomId
* name: big_data_job_name_suffix
* properties:
* byteLength: 4
* keepers:
* region: ${region}
* subscription_id: ${bigDataJobSubscriptionId}
* bigDataJob:
* type: gcp:dataflow:FlexTemplateJob
* name: big_data_job
* properties:
* name: dataflow-flextemplates-job-${bigDataJobNameSuffix.dec}
* region: ${region}
* containerSpecGcsPath: gs://my-bucket/templates/template.json
* skipWaitOnJobTermination: true
* parameters:
* inputSubscription: ${bigDataJobSubscriptionId}
* ```
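*
* A Kotlin sketch of the same pattern, assuming a pulumi-random Kotlin SDK with a top-level `randomId` builder and the `flexTemplateJob` builder from this provider (these names are assumptions; `region` is omitted here since the surrounding examples leave it undeclared):
* ```kotlin
* import com.pulumi.gcp.dataflow.kotlin.flexTemplateJob
* import com.pulumi.kotlin.Pulumi
* import com.pulumi.random.kotlin.randomId
*
* fun main() {
*     Pulumi.run {
*         val subscriptionId = "projects/myproject/subscriptions/messages"
*         // The suffix is regenerated whenever a keeper changes, so a replacement
*         // job never reuses the name of the instance that is still draining.
*         val suffix = randomId("big_data_job_name_suffix") {
*             args {
*                 byteLength(4)
*                 keepers(mapOf("subscription_id" to subscriptionId))
*             }
*         }
*         flexTemplateJob("big_data_job") {
*             args {
*                 name(suffix.dec.applyValue { "dataflow-flextemplates-job-$it" })
*                 containerSpecGcsPath("gs://my-bucket/templates/template.json")
*                 skipWaitOnJobTermination(true)
*                 parameters(mapOf("inputSubscription" to subscriptionId))
*             }
*         }
*     }
* }
* ```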
*
* ## Import
* Dataflow jobs can be imported using the job `id`, e.g.
* * `{{id}}`
* When using the `pulumi import` command, Dataflow jobs can be imported using the format above. For example:
* ```sh
* $ pulumi import gcp:dataflow/job:Job default {{id}}
* ```
* */*/*/*/*/*/
*/
public class Job internal constructor(
    override val javaResource: com.pulumi.gcp.dataflow.Job,
) : KotlinCustomResource(javaResource, JobMapper) {
    /**
     * List of experiments that should be used by the job. An example value is `["enable_stackdriver_agent_metrics"]`.
     */
    public val additionalExperiments: Output<List<String>>
        get() = javaResource.additionalExperiments().applyValue({ args0 -> args0.map({ args0 -> args0 }) })
    /**
     * All labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
     */
    public val effectiveLabels: Output<Map<String, String>>
        get() = javaResource.effectiveLabels().applyValue({ args0 ->
            args0.map({ args0 -> args0.key.to(args0.value) }).toMap()
        })