@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.gcp.storage.kotlin
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.storage.TransferJobArgs.builder
import com.pulumi.gcp.storage.kotlin.inputs.TransferJobEventStreamArgs
import com.pulumi.gcp.storage.kotlin.inputs.TransferJobEventStreamArgsBuilder
import com.pulumi.gcp.storage.kotlin.inputs.TransferJobNotificationConfigArgs
import com.pulumi.gcp.storage.kotlin.inputs.TransferJobNotificationConfigArgsBuilder
import com.pulumi.gcp.storage.kotlin.inputs.TransferJobScheduleArgs
import com.pulumi.gcp.storage.kotlin.inputs.TransferJobScheduleArgsBuilder
import com.pulumi.gcp.storage.kotlin.inputs.TransferJobTransferSpecArgs
import com.pulumi.gcp.storage.kotlin.inputs.TransferJobTransferSpecArgsBuilder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.jvm.JvmName
/**
* Creates a new Transfer Job in Google Cloud Storage Transfer.
* To get more information about Google Cloud Storage Transfer, see:
* * [Overview](https://cloud.google.com/storage-transfer/docs/overview)
* * [API documentation](https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs)
* * How-to Guides
* * [Configuring Access to Data Sources and Sinks](https://cloud.google.com/storage-transfer/docs/configure-access)
* ## Example Usage
* Example creating a nightly Transfer Job from an AWS S3 Bucket to a GCS bucket.
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
* const _default = gcp.storage.getTransferProjectServiceAccount({
* project: project,
* });
* const s3_backup_bucket = new gcp.storage.Bucket("s3-backup-bucket", {
* name: `${awsS3Bucket}-backup`,
* storageClass: "NEARLINE",
* project: project,
* location: "US",
* });
* const s3_backup_bucketBucketIAMMember = new gcp.storage.BucketIAMMember("s3-backup-bucket", {
* bucket: s3_backup_bucket.name,
* role: "roles/storage.admin",
* member: _default.then(_default => `serviceAccount:${_default.email}`),
* }, {
* dependsOn: [s3_backup_bucket],
* });
* const topic = new gcp.pubsub.Topic("topic", {name: pubsubTopicName});
* const notificationConfig = new gcp.pubsub.TopicIAMMember("notification_config", {
* topic: topic.id,
* role: "roles/pubsub.publisher",
* member: _default.then(_default => `serviceAccount:${_default.email}`),
* });
* const s3_bucket_nightly_backup = new gcp.storage.TransferJob("s3-bucket-nightly-backup", {
* description: "Nightly backup of S3 bucket",
* project: project,
* transferSpec: {
* objectConditions: {
* maxTimeElapsedSinceLastModification: "600s",
* excludePrefixes: ["requests.gz"],
* },
* transferOptions: {
* deleteObjectsUniqueInSink: false,
* },
* awsS3DataSource: {
* bucketName: awsS3Bucket,
* awsAccessKey: {
* accessKeyId: awsAccessKey,
* secretAccessKey: awsSecretKey,
* },
* },
* gcsDataSink: {
* bucketName: s3_backup_bucket.name,
* path: "foo/bar/",
* },
* },
* schedule: {
* scheduleStartDate: {
* year: 2018,
* month: 10,
* day: 1,
* },
* scheduleEndDate: {
* year: 2019,
* month: 1,
* day: 15,
* },
* startTimeOfDay: {
* hours: 23,
* minutes: 30,
* seconds: 0,
* nanos: 0,
* },
* repeatInterval: "604800s",
* },
* notificationConfig: {
* pubsubTopic: topic.id,
* eventTypes: [
* "TRANSFER_OPERATION_SUCCESS",
* "TRANSFER_OPERATION_FAILED",
* ],
* payloadFormat: "JSON",
* },
* }, {
* dependsOn: [
* s3_backup_bucketBucketIAMMember,
* notificationConfig,
* ],
* });
* ```
* ```python
* import pulumi
* import pulumi_gcp as gcp
* default = gcp.storage.get_transfer_project_service_account(project=project)
* s3_backup_bucket = gcp.storage.Bucket("s3-backup-bucket",
* name=f"{aws_s3_bucket}-backup",
* storage_class="NEARLINE",
* project=project,
* location="US")
* s3_backup_bucket_bucket_iam_member = gcp.storage.BucketIAMMember("s3-backup-bucket",
* bucket=s3_backup_bucket.name,
* role="roles/storage.admin",
* member=f"serviceAccount:{default.email}",
* opts = pulumi.ResourceOptions(depends_on=[s3_backup_bucket]))
* topic = gcp.pubsub.Topic("topic", name=pubsub_topic_name)
* notification_config = gcp.pubsub.TopicIAMMember("notification_config",
* topic=topic.id,
* role="roles/pubsub.publisher",
* member=f"serviceAccount:{default.email}")
* s3_bucket_nightly_backup = gcp.storage.TransferJob("s3-bucket-nightly-backup",
* description="Nightly backup of S3 bucket",
* project=project,
* transfer_spec={
* "object_conditions": {
* "max_time_elapsed_since_last_modification": "600s",
* "exclude_prefixes": ["requests.gz"],
* },
* "transfer_options": {
* "delete_objects_unique_in_sink": False,
* },
* "aws_s3_data_source": {
* "bucket_name": aws_s3_bucket,
* "aws_access_key": {
* "access_key_id": aws_access_key,
* "secret_access_key": aws_secret_key,
* },
* },
* "gcs_data_sink": {
* "bucket_name": s3_backup_bucket.name,
* "path": "foo/bar/",
* },
* },
* schedule={
* "schedule_start_date": {
* "year": 2018,
* "month": 10,
* "day": 1,
* },
* "schedule_end_date": {
* "year": 2019,
* "month": 1,
* "day": 15,
* },
* "start_time_of_day": {
* "hours": 23,
* "minutes": 30,
* "seconds": 0,
* "nanos": 0,
* },
* "repeat_interval": "604800s",
* },
* notification_config={
* "pubsub_topic": topic.id,
* "event_types": [
* "TRANSFER_OPERATION_SUCCESS",
* "TRANSFER_OPERATION_FAILED",
* ],
* "payload_format": "JSON",
* },
* opts = pulumi.ResourceOptions(depends_on=[
* s3_backup_bucket_bucket_iam_member,
* notification_config,
* ]))
* ```
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using Gcp = Pulumi.Gcp;
* return await Deployment.RunAsync(() =>
* {
* var @default = Gcp.Storage.GetTransferProjectServiceAccount.Invoke(new()
* {
* Project = project,
* });
* var s3_backup_bucket = new Gcp.Storage.Bucket("s3-backup-bucket", new()
* {
* Name = $"{awsS3Bucket}-backup",
* StorageClass = "NEARLINE",
* Project = project,
* Location = "US",
* });
* var s3_backup_bucketBucketIAMMember = new Gcp.Storage.BucketIAMMember("s3-backup-bucket", new()
* {
* Bucket = s3_backup_bucket.Name,
* Role = "roles/storage.admin",
* Member = @default.Apply(getTransferProjectServiceAccountResult => $"serviceAccount:{getTransferProjectServiceAccountResult.Email}"),
* }, new CustomResourceOptions
* {
* DependsOn =
* {
* s3_backup_bucket,
* },
* });
* var topic = new Gcp.PubSub.Topic("topic", new()
* {
* Name = pubsubTopicName,
* });
* var notificationConfig = new Gcp.PubSub.TopicIAMMember("notification_config", new()
* {
* Topic = topic.Id,
* Role = "roles/pubsub.publisher",
* Member = @default.Apply(getTransferProjectServiceAccountResult => $"serviceAccount:{getTransferProjectServiceAccountResult.Email}"),
* });
* var s3_bucket_nightly_backup = new Gcp.Storage.TransferJob("s3-bucket-nightly-backup", new()
* {
* Description = "Nightly backup of S3 bucket",
* Project = project,
* TransferSpec = new Gcp.Storage.Inputs.TransferJobTransferSpecArgs
* {
* ObjectConditions = new Gcp.Storage.Inputs.TransferJobTransferSpecObjectConditionsArgs
* {
* MaxTimeElapsedSinceLastModification = "600s",
* ExcludePrefixes = new[]
* {
* "requests.gz",
* },
* },
* TransferOptions = new Gcp.Storage.Inputs.TransferJobTransferSpecTransferOptionsArgs
* {
* DeleteObjectsUniqueInSink = false,
* },
* AwsS3DataSource = new Gcp.Storage.Inputs.TransferJobTransferSpecAwsS3DataSourceArgs
* {
* BucketName = awsS3Bucket,
* AwsAccessKey = new Gcp.Storage.Inputs.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs
* {
* AccessKeyId = awsAccessKey,
* SecretAccessKey = awsSecretKey,
* },
* },
* GcsDataSink = new Gcp.Storage.Inputs.TransferJobTransferSpecGcsDataSinkArgs
* {
* BucketName = s3_backup_bucket.Name,
* Path = "foo/bar/",
* },
* },
* Schedule = new Gcp.Storage.Inputs.TransferJobScheduleArgs
* {
* ScheduleStartDate = new Gcp.Storage.Inputs.TransferJobScheduleScheduleStartDateArgs
* {
* Year = 2018,
* Month = 10,
* Day = 1,
* },
* ScheduleEndDate = new Gcp.Storage.Inputs.TransferJobScheduleScheduleEndDateArgs
* {
* Year = 2019,
* Month = 1,
* Day = 15,
* },
* StartTimeOfDay = new Gcp.Storage.Inputs.TransferJobScheduleStartTimeOfDayArgs
* {
* Hours = 23,
* Minutes = 30,
* Seconds = 0,
* Nanos = 0,
* },
* RepeatInterval = "604800s",
* },
* NotificationConfig = new Gcp.Storage.Inputs.TransferJobNotificationConfigArgs
* {
* PubsubTopic = topic.Id,
* EventTypes = new[]
* {
* "TRANSFER_OPERATION_SUCCESS",
* "TRANSFER_OPERATION_FAILED",
* },
* PayloadFormat = "JSON",
* },
* }, new CustomResourceOptions
* {
* DependsOn =
* {
* s3_backup_bucketBucketIAMMember,
* notificationConfig,
* },
* });
* });
* ```
* ```go
* package main
* import (
* "fmt"
* "github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/pubsub"
* "github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/storage"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* _default, err := storage.GetTransferProjectServiceAccount(ctx, &storage.GetTransferProjectServiceAccountArgs{
* Project: pulumi.StringRef(project),
* }, nil)
* if err != nil {
* return err
* }
* s3_backup_bucket, err := storage.NewBucket(ctx, "s3-backup-bucket", &storage.BucketArgs{
* Name: pulumi.Sprintf("%v-backup", awsS3Bucket),
* StorageClass: pulumi.String("NEARLINE"),
* Project: pulumi.Any(project),
* Location: pulumi.String("US"),
* })
* if err != nil {
* return err
* }
* s3_backup_bucketBucketIAMMember, err := storage.NewBucketIAMMember(ctx, "s3-backup-bucket", &storage.BucketIAMMemberArgs{
* Bucket: s3_backup_bucket.Name,
* Role: pulumi.String("roles/storage.admin"),
* Member: pulumi.Sprintf("serviceAccount:%v", _default.Email),
* }, pulumi.DependsOn([]pulumi.Resource{
* s3_backup_bucket,
* }))
* if err != nil {
* return err
* }
* topic, err := pubsub.NewTopic(ctx, "topic", &pubsub.TopicArgs{
* Name: pulumi.Any(pubsubTopicName),
* })
* if err != nil {
* return err
* }
* notificationConfig, err := pubsub.NewTopicIAMMember(ctx, "notification_config", &pubsub.TopicIAMMemberArgs{
* Topic: topic.ID(),
* Role: pulumi.String("roles/pubsub.publisher"),
* Member: pulumi.Sprintf("serviceAccount:%v", _default.Email),
* })
* if err != nil {
* return err
* }
* _, err = storage.NewTransferJob(ctx, "s3-bucket-nightly-backup", &storage.TransferJobArgs{
* Description: pulumi.String("Nightly backup of S3 bucket"),
* Project: pulumi.Any(project),
* TransferSpec: &storage.TransferJobTransferSpecArgs{
* ObjectConditions: &storage.TransferJobTransferSpecObjectConditionsArgs{
* MaxTimeElapsedSinceLastModification: pulumi.String("600s"),
* ExcludePrefixes: pulumi.StringArray{
* pulumi.String("requests.gz"),
* },
* },
* TransferOptions: &storage.TransferJobTransferSpecTransferOptionsArgs{
* DeleteObjectsUniqueInSink: pulumi.Bool(false),
* },
* AwsS3DataSource: &storage.TransferJobTransferSpecAwsS3DataSourceArgs{
* BucketName: pulumi.Any(awsS3Bucket),
* AwsAccessKey: &storage.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs{
* AccessKeyId: pulumi.Any(awsAccessKey),
* SecretAccessKey: pulumi.Any(awsSecretKey),
* },
* },
* GcsDataSink: &storage.TransferJobTransferSpecGcsDataSinkArgs{
* BucketName: s3_backup_bucket.Name,
* Path: pulumi.String("foo/bar/"),
* },
* },
* Schedule: &storage.TransferJobScheduleArgs{
* ScheduleStartDate: &storage.TransferJobScheduleScheduleStartDateArgs{
* Year: pulumi.Int(2018),
* Month: pulumi.Int(10),
* Day: pulumi.Int(1),
* },
* ScheduleEndDate: &storage.TransferJobScheduleScheduleEndDateArgs{
* Year: pulumi.Int(2019),
* Month: pulumi.Int(1),
* Day: pulumi.Int(15),
* },
* StartTimeOfDay: &storage.TransferJobScheduleStartTimeOfDayArgs{
* Hours: pulumi.Int(23),
* Minutes: pulumi.Int(30),
* Seconds: pulumi.Int(0),
* Nanos: pulumi.Int(0),
* },
* RepeatInterval: pulumi.String("604800s"),
* },
* NotificationConfig: &storage.TransferJobNotificationConfigArgs{
* PubsubTopic: topic.ID(),
* EventTypes: pulumi.StringArray{
* pulumi.String("TRANSFER_OPERATION_SUCCESS"),
* pulumi.String("TRANSFER_OPERATION_FAILED"),
* },
* PayloadFormat: pulumi.String("JSON"),
* },
* }, pulumi.DependsOn([]pulumi.Resource{
* s3_backup_bucketBucketIAMMember,
* notificationConfig,
* }))
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.gcp.storage.StorageFunctions;
* import com.pulumi.gcp.storage.inputs.GetTransferProjectServiceAccountArgs;
* import com.pulumi.gcp.storage.Bucket;
* import com.pulumi.gcp.storage.BucketArgs;
* import com.pulumi.gcp.storage.BucketIAMMember;
* import com.pulumi.gcp.storage.BucketIAMMemberArgs;
* import com.pulumi.gcp.pubsub.Topic;
* import com.pulumi.gcp.pubsub.TopicArgs;
* import com.pulumi.gcp.pubsub.TopicIAMMember;
* import com.pulumi.gcp.pubsub.TopicIAMMemberArgs;
* import com.pulumi.gcp.storage.TransferJob;
* import com.pulumi.gcp.storage.TransferJobArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecObjectConditionsArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecTransferOptionsArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecAwsS3DataSourceArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecGcsDataSinkArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobScheduleArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobScheduleScheduleStartDateArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobScheduleScheduleEndDateArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobScheduleStartTimeOfDayArgs;
* import com.pulumi.gcp.storage.inputs.TransferJobNotificationConfigArgs;
* import com.pulumi.resources.CustomResourceOptions;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* final var default_ = StorageFunctions.getTransferProjectServiceAccount(GetTransferProjectServiceAccountArgs.builder()
* .project(project)
* .build());
* var s3_backup_bucket = new Bucket("s3-backup-bucket", BucketArgs.builder()
* .name(String.format("%s-backup", awsS3Bucket))
* .storageClass("NEARLINE")
* .project(project)
* .location("US")
* .build());
* var s3_backup_bucketBucketIAMMember = new BucketIAMMember("s3-backup-bucketBucketIAMMember", BucketIAMMemberArgs.builder()
* .bucket(s3_backup_bucket.name())
* .role("roles/storage.admin")
* .member(String.format("serviceAccount:%s", default_.email()))
* .build(), CustomResourceOptions.builder()
* .dependsOn(s3_backup_bucket)
* .build());
* var topic = new Topic("topic", TopicArgs.builder()
* .name(pubsubTopicName)
* .build());
* var notificationConfig = new TopicIAMMember("notificationConfig", TopicIAMMemberArgs.builder()
* .topic(topic.id())
* .role("roles/pubsub.publisher")
* .member(String.format("serviceAccount:%s", default_.email()))
* .build());
* var s3_bucket_nightly_backup = new TransferJob("s3-bucket-nightly-backup", TransferJobArgs.builder()
* .description("Nightly backup of S3 bucket")
* .project(project)
* .transferSpec(TransferJobTransferSpecArgs.builder()
* .objectConditions(TransferJobTransferSpecObjectConditionsArgs.builder()
* .maxTimeElapsedSinceLastModification("600s")
* .excludePrefixes("requests.gz")
* .build())
* .transferOptions(TransferJobTransferSpecTransferOptionsArgs.builder()
* .deleteObjectsUniqueInSink(false)
* .build())
* .awsS3DataSource(TransferJobTransferSpecAwsS3DataSourceArgs.builder()
* .bucketName(awsS3Bucket)
* .awsAccessKey(TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs.builder()
* .accessKeyId(awsAccessKey)
* .secretAccessKey(awsSecretKey)
* .build())
* .build())
* .gcsDataSink(TransferJobTransferSpecGcsDataSinkArgs.builder()
* .bucketName(s3_backup_bucket.name())
* .path("foo/bar/")
* .build())
* .build())
* .schedule(TransferJobScheduleArgs.builder()
* .scheduleStartDate(TransferJobScheduleScheduleStartDateArgs.builder()
* .year(2018)
* .month(10)
* .day(1)
* .build())
* .scheduleEndDate(TransferJobScheduleScheduleEndDateArgs.builder()
* .year(2019)
* .month(1)
* .day(15)
* .build())
* .startTimeOfDay(TransferJobScheduleStartTimeOfDayArgs.builder()
* .hours(23)
* .minutes(30)
* .seconds(0)
* .nanos(0)
* .build())
* .repeatInterval("604800s")
* .build())
* .notificationConfig(TransferJobNotificationConfigArgs.builder()
* .pubsubTopic(topic.id())
* .eventTypes(
* "TRANSFER_OPERATION_SUCCESS",
* "TRANSFER_OPERATION_FAILED")
* .payloadFormat("JSON")
* .build())
* .build(), CustomResourceOptions.builder()
* .dependsOn(
* s3_backup_bucketBucketIAMMember,
* notificationConfig)
* .build());
* }
* }
* ```
* ```yaml
* resources:
* s3-backup-bucket:
* type: gcp:storage:Bucket
* properties:
* name: ${awsS3Bucket}-backup
* storageClass: NEARLINE
* project: ${project}
* location: US
* s3-backup-bucketBucketIAMMember:
* type: gcp:storage:BucketIAMMember
* name: s3-backup-bucket
* properties:
* bucket: ${["s3-backup-bucket"].name}
* role: roles/storage.admin
* member: serviceAccount:${default.email}
* options:
* dependsOn:
* - ${["s3-backup-bucket"]}
* topic:
* type: gcp:pubsub:Topic
* properties:
* name: ${pubsubTopicName}
* notificationConfig:
* type: gcp:pubsub:TopicIAMMember
* name: notification_config
* properties:
* topic: ${topic.id}
* role: roles/pubsub.publisher
* member: serviceAccount:${default.email}
* s3-bucket-nightly-backup:
* type: gcp:storage:TransferJob
* properties:
* description: Nightly backup of S3 bucket
* project: ${project}
* transferSpec:
* objectConditions:
* maxTimeElapsedSinceLastModification: 600s
* excludePrefixes:
* - requests.gz
* transferOptions:
* deleteObjectsUniqueInSink: false
* awsS3DataSource:
* bucketName: ${awsS3Bucket}
* awsAccessKey:
* accessKeyId: ${awsAccessKey}
* secretAccessKey: ${awsSecretKey}
* gcsDataSink:
* bucketName: ${["s3-backup-bucket"].name}
* path: foo/bar/
* schedule:
* scheduleStartDate:
* year: 2018
* month: 10
* day: 1
* scheduleEndDate:
* year: 2019
* month: 1
* day: 15
* startTimeOfDay:
* hours: 23
* minutes: 30
* seconds: 0
* nanos: 0
* repeatInterval: 604800s
* notificationConfig:
* pubsubTopic: ${topic.id}
* eventTypes:
* - TRANSFER_OPERATION_SUCCESS
* - TRANSFER_OPERATION_FAILED
* payloadFormat: JSON
* options:
* dependsOn:
* - ${["s3-backup-bucketBucketIAMMember"]}
* - ${notificationConfig}
* variables:
* default:
* fn::invoke:
* Function: gcp:storage:getTransferProjectServiceAccount
* Arguments:
* project: ${project}
* ```
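 *
 * Since this file is part of the Kotlin SDK, the same nightly backup can also be sketched with the
 * type-safe Kotlin builders. The sketch below is illustrative only: it assumes the generated
 * `transferJob` resource builder from `com.pulumi.gcp.storage.kotlin`, the `Pulumi.run { }` entry
 * point from `com.pulumi.kotlin`, and nested builder lambdas for the transfer spec and schedule;
 * the project, bucket names, and AWS credentials are placeholders, and the bucket/IAM setup shown
 * in the examples above is omitted for brevity.
 * ```kotlin
 * import com.pulumi.gcp.storage.kotlin.transferJob
 * import com.pulumi.kotlin.Pulumi
 *
 * fun main() {
 *     Pulumi.run {
 *         // Nightly transfer from an AWS S3 bucket into a GCS bucket (all values are placeholders).
 *         transferJob("s3-bucket-nightly-backup") {
 *             args {
 *                 description("Nightly backup of S3 bucket")
 *                 project("my-project")
 *                 transferSpec {
 *                     awsS3DataSource {
 *                         bucketName("my-aws-bucket")
 *                         awsAccessKey {
 *                             accessKeyId("AWS_ACCESS_KEY_ID")
 *                             secretAccessKey("AWS_SECRET_ACCESS_KEY")
 *                         }
 *                     }
 *                     gcsDataSink {
 *                         bucketName("my-aws-bucket-backup")
 *                         path("foo/bar/")
 *                     }
 *                 }
 *                 schedule {
 *                     scheduleStartDate {
 *                         year(2018)
 *                         month(10)
 *                         day(1)
 *                     }
 *                     startTimeOfDay {
 *                         hours(23)
 *                         minutes(30)
 *                         seconds(0)
 *                         nanos(0)
 *                     }
 *                     repeatInterval("604800s")
 *                 }
 *             }
 *         }
 *     }
 * }
 * ```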
*
* ## Import
* Storage Transfer Jobs can be imported using the Transfer Job's `project` and `name` (without the `transferJobs/` prefix), e.g.
* * `{{project_id}}/{{name}}`, where `name` is a numeric value.
* When using the `pulumi import` command, Storage Transfer Jobs can be imported using one of the formats above. For example:
* ```sh
* $ pulumi import gcp:storage/transferJob:TransferJob default {{project_id}}/123456789
* ```
* @property description Unique description to identify the Transfer Job.
* @property eventStream Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below. Either `event_stream` or `schedule` must be set.
* @property name The name of the Transfer Job. This name must start with the "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( `transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$` ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( `transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$` ). For all other transfer types, this name must not start with transferJobs/OPI. By default, the provider will assign a random unique name in the `transferJobs/{{name}}` format, where `name` is a numeric value.
* @property notificationConfig Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
* @property project The project in which the resource belongs. If it
* is not provided, the provider project is used.
* @property schedule Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either `schedule` or `event_stream` must be set.
* @property status Status of the job. Default: `ENABLED`. **NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.**
* @property transferSpec Transfer specification. Structure documented below.
* - - -
*/
public data class TransferJobArgs(
public val description: Output<String>? = null,
public val eventStream: Output<TransferJobEventStreamArgs>? = null,
public val name: Output<String>? = null,
public val notificationConfig: Output<TransferJobNotificationConfigArgs>? = null,
public val project: Output<String>? = null,
public val schedule: Output<TransferJobScheduleArgs>? = null,
public val status: Output<String>? = null,
public val transferSpec: Output<TransferJobTransferSpecArgs>? = null,
) : ConvertibleToJava<com.pulumi.gcp.storage.TransferJobArgs> {
override fun toJava(): com.pulumi.gcp.storage.TransferJobArgs =
com.pulumi.gcp.storage.TransferJobArgs.builder()
.description(description?.applyValue({ args0 -> args0 }))
.eventStream(eventStream?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
.name(name?.applyValue({ args0 -> args0 }))
.notificationConfig(
notificationConfig?.applyValue({ args0 ->
args0.let({ args0 ->
args0.toJava()
})
}),
)
.project(project?.applyValue({ args0 -> args0 }))
.schedule(schedule?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
.status(status?.applyValue({ args0 -> args0 }))
.transferSpec(transferSpec?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) })).build()
}
/**
* Builder for [TransferJobArgs].
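*
* Each property has overloads that accept a plain value or an [Output], and nested argument types
* additionally accept a builder lambda. A brief illustrative fragment of the three styles (not a
* complete program), assuming this builder is reached through the provider's `args { }` DSL block:
* ```kotlin
* args {
*     description("Nightly backup of S3 bucket")   // plain-value overload
*     project(Output.of("my-project"))             // Output overload
*     schedule {                                   // nested builder-lambda overload
*         repeatInterval("604800s")
*     }
* }
* ```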
*/
@PulumiTagMarker
public class TransferJobArgsBuilder internal constructor() {
private var description: Output<String>? = null
private var eventStream: Output<TransferJobEventStreamArgs>? = null
private var name: Output<String>? = null
private var notificationConfig: Output<TransferJobNotificationConfigArgs>? = null
private var project: Output<String>? = null
private var schedule: Output<TransferJobScheduleArgs>? = null
private var status: Output<String>? = null
private var transferSpec: Output<TransferJobTransferSpecArgs>? = null
/**
* @param value Unique description to identify the Transfer Job.
*/
@JvmName("vcecjrxalqslabud")
public suspend fun description(`value`: Output<String>) {
this.description = value
}
/**
* @param value Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below. Either `event_stream` or `schedule` must be set.
*/
@JvmName("obosnxxqjraufjpi")
public suspend fun eventStream(`value`: Output<TransferJobEventStreamArgs>) {
this.eventStream = value
}
/**
* @param value The name of the Transfer Job. This name must start with the "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( `transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$` ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( `transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$` ). For all other transfer types, this name must not start with transferJobs/OPI. By default, the provider will assign a random unique name in the `transferJobs/{{name}}` format, where `name` is a numeric value.
*/
@JvmName("dbtdjfslfximmmae")
public suspend fun name(`value`: Output<String>) {
this.name = value
}
/**
* @param value Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
*/
@JvmName("lugjxknocyvmjoac")
public suspend fun notificationConfig(`value`: Output<TransferJobNotificationConfigArgs>) {
this.notificationConfig = value
}
/**
* @param value The project in which the resource belongs. If it
* is not provided, the provider project is used.
*/
@JvmName("bhsqipekivleolbe")
public suspend fun project(`value`: Output<String>) {
this.project = value
}
/**
* @param value Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either `schedule` or `event_stream` must be set.
*/
@JvmName("etkdxqpsoilgjfgx")
public suspend fun schedule(`value`: Output<TransferJobScheduleArgs>) {
this.schedule = value
}
/**
* @param value Status of the job. Default: `ENABLED`. **NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.**
*/
@JvmName("dcyelyimslimtkfi")
public suspend fun status(`value`: Output<String>) {
this.status = value
}
/**
* @param value Transfer specification. Structure documented below.
* - - -
*/
@JvmName("vgokfbwwsyodnrxk")
public suspend fun transferSpec(`value`: Output<TransferJobTransferSpecArgs>) {
this.transferSpec = value
}
/**
* @param value Unique description to identify the Transfer Job.
*/
@JvmName("tnlsbupoosolwqyo")
public suspend fun description(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.description = mapped
}
/**
* @param value Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below. Either `event_stream` or `schedule` must be set.
*/
@JvmName("teynbrgdneislvxy")
public suspend fun eventStream(`value`: TransferJobEventStreamArgs?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.eventStream = mapped
}
/**
* @param argument Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below. Either `event_stream` or `schedule` must be set.
*/
@JvmName("voxngamsrqvmrekm")
public suspend fun eventStream(argument: suspend TransferJobEventStreamArgsBuilder.() -> Unit) {
val toBeMapped = TransferJobEventStreamArgsBuilder().applySuspend { argument() }.build()
val mapped = of(toBeMapped)
this.eventStream = mapped
}
/**
* @param value The name of the Transfer Job. This name must start with the "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( `transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$` ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( `transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$` ). For all other transfer types, this name must not start with transferJobs/OPI. By default, the provider will assign a random unique name in the `transferJobs/{{name}}` format, where `name` is a numeric value.
*/
@JvmName("vddrxplmnoqjhrin")
public suspend fun name(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.name = mapped
}
/**
* @param value Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
*/
@JvmName("gnengflxfjuuojqs")
public suspend fun notificationConfig(`value`: TransferJobNotificationConfigArgs?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.notificationConfig = mapped
}
/**
* @param argument Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
*/
@JvmName("jfqbqstfnfnyymlb")
public suspend fun notificationConfig(argument: suspend TransferJobNotificationConfigArgsBuilder.() -> Unit) {
val toBeMapped = TransferJobNotificationConfigArgsBuilder().applySuspend { argument() }.build()
val mapped = of(toBeMapped)
this.notificationConfig = mapped
}
/**
* @param value The project in which the resource belongs. If it
* is not provided, the provider project is used.
*/
@JvmName("ranyoqksobwrkfff")
public suspend fun project(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.project = mapped
}
/**
* @param value Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either `schedule` or `event_stream` must be set.
*/
@JvmName("mfxjnagqbkkbtuet")
public suspend fun schedule(`value`: TransferJobScheduleArgs?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.schedule = mapped
}
/**
* @param argument Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either `schedule` or `event_stream` must be set.
*/
@JvmName("oglhuiumfedeqbbk")
public suspend fun schedule(argument: suspend TransferJobScheduleArgsBuilder.() -> Unit) {
val toBeMapped = TransferJobScheduleArgsBuilder().applySuspend { argument() }.build()
val mapped = of(toBeMapped)
this.schedule = mapped
}
/**
* @param value Status of the job. Default: `ENABLED`. **NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.**
*/
@JvmName("iwhrxuctppulqlbm")
public suspend fun status(`value`: String?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.status = mapped
}
/**
* @param value Transfer specification. Structure documented below.
* - - -
*/
@JvmName("ybmdmseubchehaes")
public suspend fun transferSpec(`value`: TransferJobTransferSpecArgs?) {
val toBeMapped = value
val mapped = toBeMapped?.let({ args0 -> of(args0) })
this.transferSpec = mapped
}
/**
* @param argument Transfer specification. Structure documented below.
* - - -
*/
@JvmName("yissyeiqpypodppr")
public suspend fun transferSpec(argument: suspend TransferJobTransferSpecArgsBuilder.() -> Unit) {
val toBeMapped = TransferJobTransferSpecArgsBuilder().applySuspend { argument() }.build()
val mapped = of(toBeMapped)
this.transferSpec = mapped
}
internal fun build(): TransferJobArgs = TransferJobArgs(
description = description,
eventStream = eventStream,
name = name,
notificationConfig = notificationConfig,
project = project,
schedule = schedule,
status = status,
transferSpec = transferSpec,
)
}