// com.pulumi.gcp.storage.kotlin.TransferJobArgs.kt  (pulumi-gcp-kotlin, Maven/Gradle/Ivy artifact)
// Build cloud applications and infrastructure by combining the safety and reliability of
// infrastructure as code with the power of the Kotlin programming language.
@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.gcp.storage.kotlin
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.storage.TransferJobArgs.builder
import com.pulumi.gcp.storage.kotlin.inputs.TransferJobEventStreamArgs
import com.pulumi.gcp.storage.kotlin.inputs.TransferJobEventStreamArgsBuilder
import com.pulumi.gcp.storage.kotlin.inputs.TransferJobNotificationConfigArgs
import com.pulumi.gcp.storage.kotlin.inputs.TransferJobNotificationConfigArgsBuilder
import com.pulumi.gcp.storage.kotlin.inputs.TransferJobScheduleArgs
import com.pulumi.gcp.storage.kotlin.inputs.TransferJobScheduleArgsBuilder
import com.pulumi.gcp.storage.kotlin.inputs.TransferJobTransferSpecArgs
import com.pulumi.gcp.storage.kotlin.inputs.TransferJobTransferSpecArgsBuilder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.jvm.JvmName
/**
* Creates a new Transfer Job in Google Cloud Storage Transfer.
* To get more information about Google Cloud Storage Transfer, see:
* * [Overview](https://cloud.google.com/storage-transfer/docs/overview)
* * [API documentation](https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs)
* * How-to Guides
* * [Configuring Access to Data Sources and Sinks](https://cloud.google.com/storage-transfer/docs/configure-access)
* ## Example Usage
* Example creating a nightly Transfer Job from an AWS S3 Bucket to a GCS bucket.
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
 * const _default = gcp.storage.getTransferProjectServiceAccount({
* project: project,
* });
* const s3_backup_bucket = new gcp.storage.Bucket("s3-backup-bucket", {
* name: `${awsS3Bucket}-backup`,
* storageClass: "NEARLINE",
* project: project,
* location: "US",
* });
* const s3_backup_bucketBucketIAMMember = new gcp.storage.BucketIAMMember("s3-backup-bucket", {
* bucket: s3_backup_bucket.name,
* role: "roles/storage.admin",
* member: _default.then(_default => `serviceAccount:${_default.email}`),
* });
* const topic = new gcp.pubsub.Topic("topic", {name: pubsubTopicName});
* const notificationConfig = new gcp.pubsub.TopicIAMMember("notification_config", {
* topic: topic.id,
* role: "roles/pubsub.publisher",
* member: _default.then(_default => `serviceAccount:${_default.email}`),
* });
* const s3_bucket_nightly_backup = new gcp.storage.TransferJob("s3-bucket-nightly-backup", {
* description: "Nightly backup of S3 bucket",
* project: project,
* transferSpec: {
* objectConditions: {
* maxTimeElapsedSinceLastModification: "600s",
* excludePrefixes: ["requests.gz"],
* },
* transferOptions: {
* deleteObjectsUniqueInSink: false,
* },
* awsS3DataSource: {
* bucketName: awsS3Bucket,
* awsAccessKey: {
* accessKeyId: awsAccessKey,
* secretAccessKey: awsSecretKey,
* },
* },
* gcsDataSink: {
* bucketName: s3_backup_bucket.name,
* path: "foo/bar/",
* },
* },
* schedule: {
* scheduleStartDate: {
* year: 2018,
* month: 10,
* day: 1,
* },
* scheduleEndDate: {
* year: 2019,
* month: 1,
* day: 15,
* },
* startTimeOfDay: {
* hours: 23,
* minutes: 30,
* seconds: 0,
* nanos: 0,
* },
* repeatInterval: "604800s",
* },
* notificationConfig: {
* pubsubTopic: topic.id,
* eventTypes: [
* "TRANSFER_OPERATION_SUCCESS",
* "TRANSFER_OPERATION_FAILED",
* ],
* payloadFormat: "JSON",
* },
* });
* ```
* ```python
* import pulumi
* import pulumi_gcp as gcp
* default = gcp.storage.get_transfer_project_service_account(project=project)
* s3_backup_bucket = gcp.storage.Bucket("s3-backup-bucket",
* name=f"{aws_s3_bucket}-backup",
* storage_class="NEARLINE",
* project=project,
* location="US")
* s3_backup_bucket_bucket_iam_member = gcp.storage.BucketIAMMember("s3-backup-bucket",
* bucket=s3_backup_bucket.name,
* role="roles/storage.admin",
* member=f"serviceAccount:{default.email}")
* topic = gcp.pubsub.Topic("topic", name=pubsub_topic_name)
* notification_config = gcp.pubsub.TopicIAMMember("notification_config",
* topic=topic.id,
* role="roles/pubsub.publisher",
* member=f"serviceAccount:{default.email}")
* s3_bucket_nightly_backup = gcp.storage.TransferJob("s3-bucket-nightly-backup",
* description="Nightly backup of S3 bucket",
* project=project,
* transfer_spec=gcp.storage.TransferJobTransferSpecArgs(
* object_conditions=gcp.storage.TransferJobTransferSpecObjectConditionsArgs(
* max_time_elapsed_since_last_modification="600s",
* exclude_prefixes=["requests.gz"],
* ),
* transfer_options=gcp.storage.TransferJobTransferSpecTransferOptionsArgs(
* delete_objects_unique_in_sink=False,
* ),
* aws_s3_data_source=gcp.storage.TransferJobTransferSpecAwsS3DataSourceArgs(
* bucket_name=aws_s3_bucket,
* aws_access_key=gcp.storage.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs(
* access_key_id=aws_access_key,
* secret_access_key=aws_secret_key,
* ),
* ),
* gcs_data_sink=gcp.storage.TransferJobTransferSpecGcsDataSinkArgs(
* bucket_name=s3_backup_bucket.name,
* path="foo/bar/",
* ),
* ),
* schedule=gcp.storage.TransferJobScheduleArgs(
* schedule_start_date=gcp.storage.TransferJobScheduleScheduleStartDateArgs(
* year=2018,
* month=10,
* day=1,
* ),
* schedule_end_date=gcp.storage.TransferJobScheduleScheduleEndDateArgs(
* year=2019,
* month=1,
* day=15,
* ),
* start_time_of_day=gcp.storage.TransferJobScheduleStartTimeOfDayArgs(
* hours=23,
* minutes=30,
* seconds=0,
* nanos=0,
* ),
* repeat_interval="604800s",
* ),
* notification_config=gcp.storage.TransferJobNotificationConfigArgs(
* pubsub_topic=topic.id,
* event_types=[
* "TRANSFER_OPERATION_SUCCESS",
* "TRANSFER_OPERATION_FAILED",
* ],
* payload_format="JSON",
* ))
* ```
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using Gcp = Pulumi.Gcp;
* return await Deployment.RunAsync(() =>
* {
* var @default = Gcp.Storage.GetTransferProjectServiceAccount.Invoke(new()
* {
* Project = project,
* });
* var s3_backup_bucket = new Gcp.Storage.Bucket("s3-backup-bucket", new()
* {
* Name = $"{awsS3Bucket}-backup",
* StorageClass = "NEARLINE",
* Project = project,
* Location = "US",
* });
* var s3_backup_bucketBucketIAMMember = new Gcp.Storage.BucketIAMMember("s3-backup-bucket", new()
* {
* Bucket = s3_backup_bucket.Name,
* Role = "roles/storage.admin",
* Member = @default.Apply(@default => $"serviceAccount:{@default.Apply(getTransferProjectServiceAccountResult => getTransferProjectServiceAccountResult.Email)}"),
* });
* var topic = new Gcp.PubSub.Topic("topic", new()
* {
* Name = pubsubTopicName,
* });
* var notificationConfig = new Gcp.PubSub.TopicIAMMember("notification_config", new()
* {
* Topic = topic.Id,
* Role = "roles/pubsub.publisher",
* Member = @default.Apply(@default => $"serviceAccount:{@default.Apply(getTransferProjectServiceAccountResult => getTransferProjectServiceAccountResult.Email)}"),
* });
* var s3_bucket_nightly_backup = new Gcp.Storage.TransferJob("s3-bucket-nightly-backup", new()
* {
* Description = "Nightly backup of S3 bucket",
* Project = project,
* TransferSpec = new Gcp.Storage.Inputs.TransferJobTransferSpecArgs
* {
* ObjectConditions = new Gcp.Storage.Inputs.TransferJobTransferSpecObjectConditionsArgs
* {
* MaxTimeElapsedSinceLastModification = "600s",
* ExcludePrefixes = new[]
* {
* "requests.gz",
* },
* },
* TransferOptions = new Gcp.Storage.Inputs.TransferJobTransferSpecTransferOptionsArgs
* {
* DeleteObjectsUniqueInSink = false,
* },
* AwsS3DataSource = new Gcp.Storage.Inputs.TransferJobTransferSpecAwsS3DataSourceArgs
* {
* BucketName = awsS3Bucket,
* AwsAccessKey = new Gcp.Storage.Inputs.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs
* {
* AccessKeyId = awsAccessKey,
* SecretAccessKey = awsSecretKey,
* },
* },
* GcsDataSink = new Gcp.Storage.Inputs.TransferJobTransferSpecGcsDataSinkArgs
* {
* BucketName = s3_backup_bucket.Name,
* Path = "foo/bar/",
* },
* },
* Schedule = new Gcp.Storage.Inputs.TransferJobScheduleArgs
* {
* ScheduleStartDate = new Gcp.Storage.Inputs.TransferJobScheduleScheduleStartDateArgs
* {
* Year = 2018,
* Month = 10,
* Day = 1,
* },
* ScheduleEndDate = new Gcp.Storage.Inputs.TransferJobScheduleScheduleEndDateArgs
* {
* Year = 2019,
* Month = 1,
* Day = 15,
* },
* StartTimeOfDay = new Gcp.Storage.Inputs.TransferJobScheduleStartTimeOfDayArgs
* {
* Hours = 23,
* Minutes = 30,
* Seconds = 0,
* Nanos = 0,
* },
* RepeatInterval = "604800s",
* },
* NotificationConfig = new Gcp.Storage.Inputs.TransferJobNotificationConfigArgs
* {
* PubsubTopic = topic.Id,
* EventTypes = new[]
* {
* "TRANSFER_OPERATION_SUCCESS",
* "TRANSFER_OPERATION_FAILED",
* },
* PayloadFormat = "JSON",
* },
* });
* });
* ```
* ```go
* package main
* import (
* "fmt"
* "github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/pubsub"
* "github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/storage"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* _default, err := storage.GetTransferProjectServiceAccount(ctx, &storage.GetTransferProjectServiceAccountArgs{
* Project: pulumi.StringRef(project),
* }, nil)
* if err != nil {
* return err
* }
* _, err = storage.NewBucket(ctx, "s3-backup-bucket", &storage.BucketArgs{
* Name: pulumi.String(fmt.Sprintf("%v-backup", awsS3Bucket)),
* StorageClass: pulumi.String("NEARLINE"),
* Project: pulumi.Any(project),
* Location: pulumi.String("US"),
* })
* if err != nil {
* return err
* }
* _, err = storage.NewBucketIAMMember(ctx, "s3-backup-bucket", &storage.BucketIAMMemberArgs{
* Bucket: s3_backup_bucket.Name,
* Role: pulumi.String("roles/storage.admin"),
* Member: pulumi.String(fmt.Sprintf("serviceAccount:%v", _default.Email)),
* })
* if err != nil {
* return err
* }
* topic, err := pubsub.NewTopic(ctx, "topic", &pubsub.TopicArgs{
* Name: pulumi.Any(pubsubTopicName),
* })
* if err != nil {
* return err
* }
* _, err = pubsub.NewTopicIAMMember(ctx, "notification_config", &pubsub.TopicIAMMemberArgs{
* Topic: topic.ID(),
* Role: pulumi.String("roles/pubsub.publisher"),
* Member: pulumi.String(fmt.Sprintf("serviceAccount:%v", _default.Email)),
* })
* if err != nil {
* return err
* }
* _, err = storage.NewTransferJob(ctx, "s3-bucket-nightly-backup", &storage.TransferJobArgs{
* Description: pulumi.String("Nightly backup of S3 bucket"),
* Project: pulumi.Any(project),
* TransferSpec: &storage.TransferJobTransferSpecArgs{
* ObjectConditions: &storage.TransferJobTransferSpecObjectConditionsArgs{
* MaxTimeElapsedSinceLastModification: pulumi.String("600s"),
* ExcludePrefixes: pulumi.StringArray{
* pulumi.String("requests.gz"),
* },
* },
* TransferOptions: &storage.TransferJobTransferSpecTransferOptionsArgs{
* DeleteObjectsUniqueInSink: pulumi.Bool(false),
* },
* AwsS3DataSource: &storage.TransferJobTransferSpecAwsS3DataSourceArgs{
* BucketName: pulumi.Any(awsS3Bucket),
* AwsAccessKey: &storage.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs{
* AccessKeyId: pulumi.Any(awsAccessKey),
* SecretAccessKey: pulumi.Any(awsSecretKey),
* },
* },
* GcsDataSink: &storage.TransferJobTransferSpecGcsDataSinkArgs{
* BucketName: s3_backup_bucket.Name,
* Path: pulumi.String("foo/bar/"),
* },
* },
* Schedule: &storage.TransferJobScheduleArgs{
* ScheduleStartDate: &storage.TransferJobScheduleScheduleStartDateArgs{
* Year: pulumi.Int(2018),
* Month: pulumi.Int(10),
* Day: pulumi.Int(1),
* },
* ScheduleEndDate: &storage.TransferJobScheduleScheduleEndDateArgs{
* Year: pulumi.Int(2019),
* Month: pulumi.Int(1),
* Day: pulumi.Int(15),
* },
* StartTimeOfDay: &storage.TransferJobScheduleStartTimeOfDayArgs{
* Hours: pulumi.Int(23),
* Minutes: pulumi.Int(30),
* Seconds: pulumi.Int(0),
* Nanos: pulumi.Int(0),
* },
* RepeatInterval: pulumi.String("604800s"),
* },
* NotificationConfig: &storage.TransferJobNotificationConfigArgs{
* PubsubTopic: topic.ID(),
* EventTypes: pulumi.StringArray{
* pulumi.String("TRANSFER_OPERATION_SUCCESS"),
* pulumi.String("TRANSFER_OPERATION_FAILED"),
* },
* PayloadFormat: pulumi.String("JSON"),
* },
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.gcp.storage.StorageFunctions;
* import com.pulumi.gcp.storage.inputs.GetTransferProjectServiceAccountArgs;
* import com.pulumi.gcp.storage.Bucket;
* import com.pulumi.gcp.storage.BucketArgs;
* import com.pulumi.gcp.storage.BucketIAMMember;
* import com.pulumi.gcp.storage.BucketIAMMemberArgs;
* import com.pulumi.gcp.pubsub.Topic;
* import com.pulumi.gcp.pubsub.TopicArgs;
* import com.pulumi.gcp.pubsub.TopicIAMMember;
* import com.pulumi.gcp.pubsub.TopicIAMMemberArgs;
* import com.pulumi.gcp.storage.TransferJob;
* import com.pulumi.gcp.storage.TransferJobArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecObjectConditionsArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecTransferOptionsArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecAwsS3DataSourceArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecGcsDataSinkArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobScheduleArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobScheduleScheduleStartDateArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobScheduleScheduleEndDateArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobScheduleStartTimeOfDayArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobNotificationConfigArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
 *         final var default_ = StorageFunctions.getTransferProjectServiceAccount(GetTransferProjectServiceAccountArgs.builder()
* .project(project)
* .build());
* var s3_backup_bucket = new Bucket("s3-backup-bucket", BucketArgs.builder()
* .name(String.format("%s-backup", awsS3Bucket))
* .storageClass("NEARLINE")
* .project(project)
* .location("US")
* .build());
* var s3_backup_bucketBucketIAMMember = new BucketIAMMember("s3-backup-bucketBucketIAMMember", BucketIAMMemberArgs.builder()
* .bucket(s3_backup_bucket.name())
* .role("roles/storage.admin")
* .member(String.format("serviceAccount:%s", default_.email()))
* .build());
* var topic = new Topic("topic", TopicArgs.builder()
* .name(pubsubTopicName)
* .build());
* var notificationConfig = new TopicIAMMember("notificationConfig", TopicIAMMemberArgs.builder()
* .topic(topic.id())
* .role("roles/pubsub.publisher")
* .member(String.format("serviceAccount:%s", default_.email()))
* .build());
* var s3_bucket_nightly_backup = new TransferJob("s3-bucket-nightly-backup", TransferJobArgs.builder()
* .description("Nightly backup of S3 bucket")
* .project(project)
* .transferSpec(TransferJobTransferSpecArgs.builder()
* .objectConditions(TransferJobTransferSpecObjectConditionsArgs.builder()
* .maxTimeElapsedSinceLastModification("600s")
* .excludePrefixes("requests.gz")
* .build())
* .transferOptions(TransferJobTransferSpecTransferOptionsArgs.builder()
* .deleteObjectsUniqueInSink(false)
* .build())
* .awsS3DataSource(TransferJobTransferSpecAwsS3DataSourceArgs.builder()
* .bucketName(awsS3Bucket)
* .awsAccessKey(TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs.builder()
* .accessKeyId(awsAccessKey)
* .secretAccessKey(awsSecretKey)
* .build())
* .build())
* .gcsDataSink(TransferJobTransferSpecGcsDataSinkArgs.builder()
* .bucketName(s3_backup_bucket.name())
* .path("foo/bar/")
* .build())
* .build())
* .schedule(TransferJobScheduleArgs.builder()
* .scheduleStartDate(TransferJobScheduleScheduleStartDateArgs.builder()
* .year(2018)
* .month(10)
* .day(1)
* .build())
* .scheduleEndDate(TransferJobScheduleScheduleEndDateArgs.builder()
* .year(2019)
* .month(1)
* .day(15)
* .build())
* .startTimeOfDay(TransferJobScheduleStartTimeOfDayArgs.builder()
* .hours(23)
* .minutes(30)
* .seconds(0)
* .nanos(0)
* .build())
* .repeatInterval("604800s")
* .build())
* .notificationConfig(TransferJobNotificationConfigArgs.builder()
* .pubsubTopic(topic.id())
* .eventTypes(
* "TRANSFER_OPERATION_SUCCESS",
* "TRANSFER_OPERATION_FAILED")
* .payloadFormat("JSON")
* .build())
* .build());
* }
* }
* ```
* ```yaml
* resources:
* s3-backup-bucket:
* type: gcp:storage:Bucket
* properties:
* name: ${awsS3Bucket}-backup
* storageClass: NEARLINE
* project: ${project}
* location: US
* s3-backup-bucketBucketIAMMember:
* type: gcp:storage:BucketIAMMember
* name: s3-backup-bucket
* properties:
* bucket: ${["s3-backup-bucket"].name}
* role: roles/storage.admin
* member: serviceAccount:${default.email}
* topic:
* type: gcp:pubsub:Topic
* properties:
* name: ${pubsubTopicName}
* notificationConfig:
* type: gcp:pubsub:TopicIAMMember
* name: notification_config
* properties:
* topic: ${topic.id}
* role: roles/pubsub.publisher
* member: serviceAccount:${default.email}
* s3-bucket-nightly-backup:
* type: gcp:storage:TransferJob
* properties:
* description: Nightly backup of S3 bucket
* project: ${project}
* transferSpec:
* objectConditions:
* maxTimeElapsedSinceLastModification: 600s
* excludePrefixes:
* - requests.gz
* transferOptions:
* deleteObjectsUniqueInSink: false
* awsS3DataSource:
* bucketName: ${awsS3Bucket}
* awsAccessKey:
* accessKeyId: ${awsAccessKey}
* secretAccessKey: ${awsSecretKey}
* gcsDataSink:
* bucketName: ${["s3-backup-bucket"].name}
* path: foo/bar/
* schedule:
* scheduleStartDate:
* year: 2018
* month: 10
* day: 1
* scheduleEndDate:
* year: 2019
* month: 1
* day: 15
* startTimeOfDay:
* hours: 23
* minutes: 30
* seconds: 0
* nanos: 0
* repeatInterval: 604800s
* notificationConfig:
* pubsubTopic: ${topic.id}
* eventTypes:
* - TRANSFER_OPERATION_SUCCESS
* - TRANSFER_OPERATION_FAILED
* payloadFormat: JSON
* variables:
* default:
* fn::invoke:
* Function: gcp:storage:getTransferProjectServiceAccount
* Arguments:
* project: ${project}
* ```
*
* ## Import
* Storage Transfer Jobs can be imported using the Transfer Job's `project` and `name` (without the `transferJob/` prefix), e.g.
* * `{{project_id}}/{{name}}`, where `name` is a numeric value.
* When using the `pulumi import` command, Storage Transfer Jobs can be imported using one of the formats above. For example:
* ```sh
* $ pulumi import gcp:storage/transferJob:TransferJob default {{project_id}}/123456789
* ```
* @property description Unique description to identify the Transfer Job.
* @property eventStream Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below Either `event_stream` or `schedule` must be set.
* @property name The name of the Transfer Job. This name must start with "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( `transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$` ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( `transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$` ). For all other transfer types, this name must not start with transferJobs/OPI. Default the provider will assign a random unique name with `transferJobs/{{name}}` format, where `name` is a numeric value.
* @property notificationConfig Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
* @property project The project in which the resource belongs. If it
* is not provided, the provider project is used.
* @property schedule Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either `schedule` or `event_stream` must be set.
* @property status Status of the job. Default: `ENABLED`. **NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.**
* @property transferSpec Transfer specification. Structure documented below.
* - - -
*/
public data class TransferJobArgs(
    public val description: Output<String>? = null,
    public val eventStream: Output<TransferJobEventStreamArgs>? = null,
    public val name: Output<String>? = null,
    public val notificationConfig: Output<TransferJobNotificationConfigArgs>? = null,
    public val project: Output<String>? = null,
    public val schedule: Output<TransferJobScheduleArgs>? = null,
    public val status: Output<String>? = null,
    public val transferSpec: Output<TransferJobTransferSpecArgs>? = null,
) : ConvertibleToJava<com.pulumi.gcp.storage.TransferJobArgs> {
    /**
     * Converts this Kotlin args wrapper into the underlying Java SDK
     * [com.pulumi.gcp.storage.TransferJobArgs], mapping each nested Kotlin
     * args type through its own `toJava()` and passing scalar outputs through
     * unchanged. Properties left `null` are simply not set on the builder.
     */
    override fun toJava(): com.pulumi.gcp.storage.TransferJobArgs =
        com.pulumi.gcp.storage.TransferJobArgs.builder()
            .description(description?.applyValue({ args0 -> args0 }))
            .eventStream(eventStream?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .name(name?.applyValue({ args0 -> args0 }))
            .notificationConfig(
                notificationConfig?.applyValue({ args0 ->
                    args0.let({ args0 ->
                        args0.toJava()
                    })
                }),
            )
            .project(project?.applyValue({ args0 -> args0 }))
            .schedule(schedule?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .status(status?.applyValue({ args0 -> args0 }))
            .transferSpec(transferSpec?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) })).build()
}
/**
 * Builder for [TransferJobArgs].
 */
@PulumiTagMarker
public class TransferJobArgsBuilder internal constructor() {
    private var description: Output<String>? = null

    private var eventStream: Output<TransferJobEventStreamArgs>? = null

    private var name: Output<String>? = null

    private var notificationConfig: Output<TransferJobNotificationConfigArgs>? = null

    private var project: Output<String>? = null

    private var schedule: Output<TransferJobScheduleArgs>? = null

    private var status: Output<String>? = null

    private var transferSpec: Output<TransferJobTransferSpecArgs>? = null

    /**
     * @param value Unique description to identify the Transfer Job.
     */
    @JvmName("yearhfslkkckjhgd")
    public suspend fun description(`value`: Output<String>) {
        this.description = value
    }

    /**
     * @param value Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below Either `event_stream` or `schedule` must be set.
     */
    @JvmName("mysnopoipnkjvjwg")
    public suspend fun eventStream(`value`: Output<TransferJobEventStreamArgs>) {
        this.eventStream = value
    }

    /**
     * @param value The name of the Transfer Job. This name must start with "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( `transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$` ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( `transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$` ). For all other transfer types, this name must not start with transferJobs/OPI. Default the provider will assign a random unique name with `transferJobs/{{name}}` format, where `name` is a numeric value.
     */
    @JvmName("wlrbplfqxhgimypq")
    public suspend fun name(`value`: Output<String>) {
        this.name = value
    }

    /**
     * @param value Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
     */
    @JvmName("auciummymfymdeig")
    public suspend fun notificationConfig(`value`: Output<TransferJobNotificationConfigArgs>) {
        this.notificationConfig = value
    }

    /**
     * @param value The project in which the resource belongs. If it
     * is not provided, the provider project is used.
     */
    @JvmName("dqxhrsjoqiaoyrpe")
    public suspend fun project(`value`: Output<String>) {
        this.project = value
    }

    /**
     * @param value Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either `schedule` or `event_stream` must be set.
     */
    @JvmName("alijlvngqnkciqdw")
    public suspend fun schedule(`value`: Output<TransferJobScheduleArgs>) {
        this.schedule = value
    }

    /**
     * @param value Status of the job. Default: `ENABLED`. **NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.**
     */
    @JvmName("ojfytkrnujaklvhq")
    public suspend fun status(`value`: Output<String>) {
        this.status = value
    }

    /**
     * @param value Transfer specification. Structure documented below.
     * - - -
     */
    @JvmName("uvvkngnisutpuocp")
    public suspend fun transferSpec(`value`: Output<TransferJobTransferSpecArgs>) {
        this.transferSpec = value
    }

    /**
     * @param value Unique description to identify the Transfer Job.
     */
    @JvmName("ditbedyscrnwrvrh")
    public suspend fun description(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.description = mapped
    }

    /**
     * @param value Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below Either `event_stream` or `schedule` must be set.
     */
    @JvmName("cepmxhmgqtfohfmh")
    public suspend fun eventStream(`value`: TransferJobEventStreamArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.eventStream = mapped
    }

    /**
     * @param argument Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below Either `event_stream` or `schedule` must be set.
     */
    @JvmName("komukxuyugxyrrve")
    public suspend fun eventStream(argument: suspend TransferJobEventStreamArgsBuilder.() -> Unit) {
        val toBeMapped = TransferJobEventStreamArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.eventStream = mapped
    }

    /**
     * @param value The name of the Transfer Job. This name must start with "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( `transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$` ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( `transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$` ). For all other transfer types, this name must not start with transferJobs/OPI. Default the provider will assign a random unique name with `transferJobs/{{name}}` format, where `name` is a numeric value.
     */
    @JvmName("iydghowonplgmttc")
    public suspend fun name(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.name = mapped
    }

    /**
     * @param value Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
     */
    @JvmName("dxlftkqxwwuglfcw")
    public suspend fun notificationConfig(`value`: TransferJobNotificationConfigArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.notificationConfig = mapped
    }

    /**
     * @param argument Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
     */
    @JvmName("ruhegfvifavokvwx")
    public suspend fun notificationConfig(argument: suspend TransferJobNotificationConfigArgsBuilder.() -> Unit) {
        val toBeMapped = TransferJobNotificationConfigArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.notificationConfig = mapped
    }

    /**
     * @param value The project in which the resource belongs. If it
     * is not provided, the provider project is used.
     */
    @JvmName("caflayurrbsihsni")
    public suspend fun project(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.project = mapped
    }

    /**
     * @param value Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either `schedule` or `event_stream` must be set.
     */
    @JvmName("wunroxgssedhhhcr")
    public suspend fun schedule(`value`: TransferJobScheduleArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.schedule = mapped
    }

    /**
     * @param argument Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either `schedule` or `event_stream` must be set.
     */
    @JvmName("wtbkgdqoufltmerg")
    public suspend fun schedule(argument: suspend TransferJobScheduleArgsBuilder.() -> Unit) {
        val toBeMapped = TransferJobScheduleArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.schedule = mapped
    }

    /**
     * @param value Status of the job. Default: `ENABLED`. **NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.**
     */
    @JvmName("xdfeiliaymrrkmff")
    public suspend fun status(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.status = mapped
    }

    /**
     * @param value Transfer specification. Structure documented below.
     * - - -
     */
    @JvmName("tccgfqhnijdtdcnq")
    public suspend fun transferSpec(`value`: TransferJobTransferSpecArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.transferSpec = mapped
    }

    /**
     * @param argument Transfer specification. Structure documented below.
     * - - -
     */
    @JvmName("dyweqmhjeqssmosn")
    public suspend fun transferSpec(argument: suspend TransferJobTransferSpecArgsBuilder.() -> Unit) {
        val toBeMapped = TransferJobTransferSpecArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.transferSpec = mapped
    }

    // Assembles the immutable args object from whatever setters were invoked;
    // unset properties remain null and are omitted when converted to Java.
    internal fun build(): TransferJobArgs = TransferJobArgs(
        description = description,
        eventStream = eventStream,
        name = name,
        notificationConfig = notificationConfig,
        project = project,
        schedule = schedule,
        status = status,
        transferSpec = transferSpec,
    )
}
// © 2015 - 2024 Weber Informatics LLC | Privacy Policy