
com.pulumi.gcp.bigquery.kotlin.DataTransferConfigArgs.kt

@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.gcp.bigquery.kotlin

import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.bigquery.DataTransferConfigArgs.builder
import com.pulumi.gcp.bigquery.kotlin.inputs.DataTransferConfigEmailPreferencesArgs
import com.pulumi.gcp.bigquery.kotlin.inputs.DataTransferConfigEmailPreferencesArgsBuilder
import com.pulumi.gcp.bigquery.kotlin.inputs.DataTransferConfigScheduleOptionsArgs
import com.pulumi.gcp.bigquery.kotlin.inputs.DataTransferConfigScheduleOptionsArgsBuilder
import com.pulumi.gcp.bigquery.kotlin.inputs.DataTransferConfigSensitiveParamsArgs
import com.pulumi.gcp.bigquery.kotlin.inputs.DataTransferConfigSensitiveParamsArgsBuilder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Boolean
import kotlin.Int
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.Map
import kotlin.jvm.JvmName

/**
 * Represents a data transfer configuration. A transfer configuration
 * contains all metadata needed to perform a data transfer.
 * To get more information about Config, see:
 * * [API documentation](https://cloud.google.com/bigquery/docs/reference/datatransfer/rest/v1/projects.locations.transferConfigs/create)
 * * How-to Guides
 *     * [Official Documentation](https://cloud.google.com/bigquery/docs/reference/datatransfer/rest/)
 * ## Example Usage
 * ### Bigquerydatatransfer Config Scheduled Query
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const project = gcp.organizations.getProject({});
 * const permissions = new gcp.projects.IAMMember("permissions", {
 *     project: project.then(project => project.projectId),
 *     role: "roles/iam.serviceAccountTokenCreator",
 *     member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com`),
 * });
 * const myDataset = new gcp.bigquery.Dataset("my_dataset", {
 *     datasetId: "my_dataset",
 *     friendlyName: "foo",
 *     description: "bar",
 *     location: "asia-northeast1",
 * }, {
 *     dependsOn: [permissions],
 * });
 * const queryConfig = new gcp.bigquery.DataTransferConfig("query_config", {
 *     displayName: "my-query",
 *     location: "asia-northeast1",
 *     dataSourceId: "scheduled_query",
 *     schedule: "first sunday of quarter 00:00",
 *     destinationDatasetId: myDataset.datasetId,
 *     params: {
 *         destination_table_name_template: "my_table",
 *         write_disposition: "WRITE_APPEND",
 *         query: "SELECT name FROM tabl WHERE x = 'y'",
 *     },
 * }, {
 *     dependsOn: [permissions],
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * project = gcp.organizations.get_project()
 * permissions = gcp.projects.IAMMember("permissions",
 *     project=project.project_id,
 *     role="roles/iam.serviceAccountTokenCreator",
 *     member=f"serviceAccount:service-{project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com")
 * my_dataset = gcp.bigquery.Dataset("my_dataset",
 *     dataset_id="my_dataset",
 *     friendly_name="foo",
 *     description="bar",
 *     location="asia-northeast1",
 *     opts = pulumi.ResourceOptions(depends_on=[permissions]))
 * query_config = gcp.bigquery.DataTransferConfig("query_config",
 *     display_name="my-query",
 *     location="asia-northeast1",
 *     data_source_id="scheduled_query",
 *     schedule="first sunday of quarter 00:00",
 *     destination_dataset_id=my_dataset.dataset_id,
 *     params={
 *         "destination_table_name_template": "my_table",
 *         "write_disposition": "WRITE_APPEND",
 *         "query": "SELECT name FROM tabl WHERE x = 'y'",
 *     },
 *     opts = pulumi.ResourceOptions(depends_on=[permissions]))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var project = Gcp.Organizations.GetProject.Invoke();
 *     var permissions = new Gcp.Projects.IAMMember("permissions", new()
 *     {
 *         Project = project.Apply(getProjectResult => getProjectResult.ProjectId),
 *         Role = "roles/iam.serviceAccountTokenCreator",
 *         Member = $"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com",
 *     });
 *     var myDataset = new Gcp.BigQuery.Dataset("my_dataset", new()
 *     {
 *         DatasetId = "my_dataset",
 *         FriendlyName = "foo",
 *         Description = "bar",
 *         Location = "asia-northeast1",
 *     }, new CustomResourceOptions
 *     {
 *         DependsOn =
 *         {
 *             permissions,
 *         },
 *     });
 *     var queryConfig = new Gcp.BigQuery.DataTransferConfig("query_config", new()
 *     {
 *         DisplayName = "my-query",
 *         Location = "asia-northeast1",
 *         DataSourceId = "scheduled_query",
 *         Schedule = "first sunday of quarter 00:00",
 *         DestinationDatasetId = myDataset.DatasetId,
 *         Params =
 *         {
 *             { "destination_table_name_template", "my_table" },
 *             { "write_disposition", "WRITE_APPEND" },
 *             { "query", "SELECT name FROM tabl WHERE x = 'y'" },
 *         },
 *     }, new CustomResourceOptions
 *     {
 *         DependsOn =
 *         {
 *             permissions,
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/organizations"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/projects"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		project, err := organizations.LookupProject(ctx, nil, nil)
 * 		if err != nil {
 * 			return err
 * 		}
 * 		permissions, err := projects.NewIAMMember(ctx, "permissions", &projects.IAMMemberArgs{
 * 			Project: pulumi.String(project.ProjectId),
 * 			Role:    pulumi.String("roles/iam.serviceAccountTokenCreator"),
 * 			Member:  pulumi.Sprintf("serviceAccount:service-%v@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com", project.Number),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		myDataset, err := bigquery.NewDataset(ctx, "my_dataset", &bigquery.DatasetArgs{
 * 			DatasetId:    pulumi.String("my_dataset"),
 * 			FriendlyName: pulumi.String("foo"),
 * 			Description:  pulumi.String("bar"),
 * 			Location:     pulumi.String("asia-northeast1"),
 * 		}, pulumi.DependsOn([]pulumi.Resource{
 * 			permissions,
 * 		}))
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = bigquery.NewDataTransferConfig(ctx, "query_config", &bigquery.DataTransferConfigArgs{
 * 			DisplayName:          pulumi.String("my-query"),
 * 			Location:             pulumi.String("asia-northeast1"),
 * 			DataSourceId:         pulumi.String("scheduled_query"),
 * 			Schedule:             pulumi.String("first sunday of quarter 00:00"),
 * 			DestinationDatasetId: myDataset.DatasetId,
 * 			Params: pulumi.StringMap{
 * 				"destination_table_name_template": pulumi.String("my_table"),
 * 				"write_disposition":               pulumi.String("WRITE_APPEND"),
 * 				"query":                           pulumi.String("SELECT name FROM tabl WHERE x = 'y'"),
 * 			},
 * 		}, pulumi.DependsOn([]pulumi.Resource{
 * 			permissions,
 * 		}))
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.organizations.OrganizationsFunctions;
 * import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
 * import com.pulumi.gcp.projects.IAMMember;
 * import com.pulumi.gcp.projects.IAMMemberArgs;
 * import com.pulumi.gcp.bigquery.Dataset;
 * import com.pulumi.gcp.bigquery.DatasetArgs;
 * import com.pulumi.gcp.bigquery.DataTransferConfig;
 * import com.pulumi.gcp.bigquery.DataTransferConfigArgs;
 * import com.pulumi.resources.CustomResourceOptions;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         final var project = OrganizationsFunctions.getProject();
 *         var permissions = new IAMMember("permissions", IAMMemberArgs.builder()
 *             .project(project.applyValue(getProjectResult -> getProjectResult.projectId()))
 *             .role("roles/iam.serviceAccountTokenCreator")
 *             .member(String.format("serviceAccount:service-%s@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com", project.applyValue(getProjectResult -> getProjectResult.number())))
 *             .build());
 *         var myDataset = new Dataset("myDataset", DatasetArgs.builder()
 *             .datasetId("my_dataset")
 *             .friendlyName("foo")
 *             .description("bar")
 *             .location("asia-northeast1")
 *             .build(), CustomResourceOptions.builder()
 *                 .dependsOn(permissions)
 *                 .build());
 *         var queryConfig = new DataTransferConfig("queryConfig", DataTransferConfigArgs.builder()
 *             .displayName("my-query")
 *             .location("asia-northeast1")
 *             .dataSourceId("scheduled_query")
 *             .schedule("first sunday of quarter 00:00")
 *             .destinationDatasetId(myDataset.datasetId())
 *             .params(Map.ofEntries(
 *                 Map.entry("destination_table_name_template", "my_table"),
 *                 Map.entry("write_disposition", "WRITE_APPEND"),
 *                 Map.entry("query", "SELECT name FROM tabl WHERE x = 'y'")
 *             ))
 *             .build(), CustomResourceOptions.builder()
 *                 .dependsOn(permissions)
 *                 .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   permissions:
 *     type: gcp:projects:IAMMember
 *     properties:
 *       project: ${project.projectId}
 *       role: roles/iam.serviceAccountTokenCreator
 *       member: serviceAccount:service-${project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com
 *   queryConfig:
 *     type: gcp:bigquery:DataTransferConfig
 *     name: query_config
 *     properties:
 *       displayName: my-query
 *       location: asia-northeast1
 *       dataSourceId: scheduled_query
 *       schedule: first sunday of quarter 00:00
 *       destinationDatasetId: ${myDataset.datasetId}
 *       params:
 *         destination_table_name_template: my_table
 *         write_disposition: WRITE_APPEND
 *         query: SELECT name FROM tabl WHERE x = 'y'
 *     options:
 *       dependsOn:
 *         - ${permissions}
 *   myDataset:
 *     type: gcp:bigquery:Dataset
 *     name: my_dataset
 *     properties:
 *       datasetId: my_dataset
 *       friendlyName: foo
 *       description: bar
 *       location: asia-northeast1
 *     options:
 *       dependsOn:
 *         - ${permissions}
 * variables:
 *   project:
 *     fn::invoke:
 *       Function: gcp:organizations:getProject
 *       Arguments: {}
 * ```
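 *
 * This file belongs to the Kotlin SDK, which does not ship a generated example above, but the same
 * scheduled-query arguments can be expressed with the `DataTransferConfigArgs` type declared below.
 * A minimal, illustrative sketch (the IAM binding, dataset resource, and resource creation itself are
 * omitted; the literal dataset id stands in for the `myDataset.datasetId` output used above):
 * ```kotlin
 * import com.pulumi.core.Output
 * import com.pulumi.gcp.bigquery.kotlin.DataTransferConfigArgs
 *
 * // Mirror of the "query_config" arguments from the examples above.
 * val queryConfigArgs = DataTransferConfigArgs(
 *     displayName = Output.of("my-query"),
 *     location = Output.of("asia-northeast1"),
 *     dataSourceId = Output.of("scheduled_query"),
 *     schedule = Output.of("first sunday of quarter 00:00"),
 *     destinationDatasetId = Output.of("my_dataset"),
 *     params = Output.of(mapOf(
 *         "destination_table_name_template" to "my_table",
 *         "write_disposition" to "WRITE_APPEND",
 *         "query" to "SELECT name FROM tabl WHERE x = 'y'",
 *     )),
 * )
 * ```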
 * 
 * ### Bigquerydatatransfer Config Salesforce
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const project = gcp.organizations.getProject({});
 * const myDataset = new gcp.bigquery.Dataset("my_dataset", {
 *     datasetId: "my_dataset",
 *     description: "My dataset",
 *     location: "asia-northeast1",
 * });
 * const salesforceConfig = new gcp.bigquery.DataTransferConfig("salesforce_config", {
 *     displayName: "my-salesforce-config",
 *     location: "asia-northeast1",
 *     dataSourceId: "salesforce",
 *     schedule: "first sunday of quarter 00:00",
 *     destinationDatasetId: myDataset.datasetId,
 *     params: {
 *         "connector.authentication.oauth.clientId": "client-id",
 *         "connector.authentication.oauth.clientSecret": "client-secret",
 *         "connector.authentication.username": "username",
 *         "connector.authentication.password": "password",
 *         "connector.authentication.securityToken": "security-token",
 *         assets: "[\"asset-a\",\"asset-b\"]",
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * project = gcp.organizations.get_project()
 * my_dataset = gcp.bigquery.Dataset("my_dataset",
 *     dataset_id="my_dataset",
 *     description="My dataset",
 *     location="asia-northeast1")
 * salesforce_config = gcp.bigquery.DataTransferConfig("salesforce_config",
 *     display_name="my-salesforce-config",
 *     location="asia-northeast1",
 *     data_source_id="salesforce",
 *     schedule="first sunday of quarter 00:00",
 *     destination_dataset_id=my_dataset.dataset_id,
 *     params={
 *         "connector.authentication.oauth.clientId": "client-id",
 *         "connector.authentication.oauth.clientSecret": "client-secret",
 *         "connector.authentication.username": "username",
 *         "connector.authentication.password": "password",
 *         "connector.authentication.securityToken": "security-token",
 *         "assets": "[\"asset-a\",\"asset-b\"]",
 *     })
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var project = Gcp.Organizations.GetProject.Invoke();
 *     var myDataset = new Gcp.BigQuery.Dataset("my_dataset", new()
 *     {
 *         DatasetId = "my_dataset",
 *         Description = "My dataset",
 *         Location = "asia-northeast1",
 *     });
 *     var salesforceConfig = new Gcp.BigQuery.DataTransferConfig("salesforce_config", new()
 *     {
 *         DisplayName = "my-salesforce-config",
 *         Location = "asia-northeast1",
 *         DataSourceId = "salesforce",
 *         Schedule = "first sunday of quarter 00:00",
 *         DestinationDatasetId = myDataset.DatasetId,
 *         Params =
 *         {
 *             { "connector.authentication.oauth.clientId", "client-id" },
 *             { "connector.authentication.oauth.clientSecret", "client-secret" },
 *             { "connector.authentication.username", "username" },
 *             { "connector.authentication.password", "password" },
 *             { "connector.authentication.securityToken", "security-token" },
 *             { "assets", "[\"asset-a\",\"asset-b\"]" },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/organizations"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := organizations.LookupProject(ctx, nil, nil)
 * 		if err != nil {
 * 			return err
 * 		}
 * 		myDataset, err := bigquery.NewDataset(ctx, "my_dataset", &bigquery.DatasetArgs{
 * 			DatasetId:   pulumi.String("my_dataset"),
 * 			Description: pulumi.String("My dataset"),
 * 			Location:    pulumi.String("asia-northeast1"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = bigquery.NewDataTransferConfig(ctx, "salesforce_config", &bigquery.DataTransferConfigArgs{
 * 			DisplayName:          pulumi.String("my-salesforce-config"),
 * 			Location:             pulumi.String("asia-northeast1"),
 * 			DataSourceId:         pulumi.String("salesforce"),
 * 			Schedule:             pulumi.String("first sunday of quarter 00:00"),
 * 			DestinationDatasetId: myDataset.DatasetId,
 * 			Params: pulumi.StringMap{
 * 				"connector.authentication.oauth.clientId":     pulumi.String("client-id"),
 * 				"connector.authentication.oauth.clientSecret": pulumi.String("client-secret"),
 * 				"connector.authentication.username":           pulumi.String("username"),
 * 				"connector.authentication.password":           pulumi.String("password"),
 * 				"connector.authentication.securityToken":      pulumi.String("security-token"),
 * 				"assets":                                      pulumi.String("[\"asset-a\",\"asset-b\"]"),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.organizations.OrganizationsFunctions;
 * import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
 * import com.pulumi.gcp.bigquery.Dataset;
 * import com.pulumi.gcp.bigquery.DatasetArgs;
 * import com.pulumi.gcp.bigquery.DataTransferConfig;
 * import com.pulumi.gcp.bigquery.DataTransferConfigArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         final var project = OrganizationsFunctions.getProject();
 *         var myDataset = new Dataset("myDataset", DatasetArgs.builder()
 *             .datasetId("my_dataset")
 *             .description("My dataset")
 *             .location("asia-northeast1")
 *             .build());
 *         var salesforceConfig = new DataTransferConfig("salesforceConfig", DataTransferConfigArgs.builder()
 *             .displayName("my-salesforce-config")
 *             .location("asia-northeast1")
 *             .dataSourceId("salesforce")
 *             .schedule("first sunday of quarter 00:00")
 *             .destinationDatasetId(myDataset.datasetId())
 *             .params(Map.ofEntries(
 *                 Map.entry("connector.authentication.oauth.clientId", "client-id"),
 *                 Map.entry("connector.authentication.oauth.clientSecret", "client-secret"),
 *                 Map.entry("connector.authentication.username", "username"),
 *                 Map.entry("connector.authentication.password", "password"),
 *                 Map.entry("connector.authentication.securityToken", "security-token"),
 *                 Map.entry("assets", "[\"asset-a\",\"asset-b\"]")
 *             ))
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   myDataset:
 *     type: gcp:bigquery:Dataset
 *     name: my_dataset
 *     properties:
 *       datasetId: my_dataset
 *       description: My dataset
 *       location: asia-northeast1
 *   salesforceConfig:
 *     type: gcp:bigquery:DataTransferConfig
 *     name: salesforce_config
 *     properties:
 *       displayName: my-salesforce-config
 *       location: asia-northeast1
 *       dataSourceId: salesforce
 *       schedule: first sunday of quarter 00:00
 *       destinationDatasetId: ${myDataset.datasetId}
 *       params:
 *         connector.authentication.oauth.clientId: client-id
 *         connector.authentication.oauth.clientSecret: client-secret
 *         connector.authentication.username: username
 *         connector.authentication.password: password
 *         connector.authentication.securityToken: security-token
 *         assets: '["asset-a","asset-b"]'
 * variables:
 *   project:
 *     fn::invoke:
 *       Function: gcp:organizations:getProject
 *       Arguments: {}
 * ```
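 *
 * As with the scheduled-query example, the Salesforce arguments can be sketched in Kotlin with the
 * `DataTransferConfigArgs` type declared below; `toJava()`, defined on this class, converts them to
 * the underlying Java args when interop is needed. Illustrative only, with the same hard-coded
 * placeholder credentials and dataset id as above:
 * ```kotlin
 * import com.pulumi.core.Output
 * import com.pulumi.gcp.bigquery.kotlin.DataTransferConfigArgs
 *
 * val salesforceArgs = DataTransferConfigArgs(
 *     displayName = Output.of("my-salesforce-config"),
 *     location = Output.of("asia-northeast1"),
 *     dataSourceId = Output.of("salesforce"),
 *     schedule = Output.of("first sunday of quarter 00:00"),
 *     destinationDatasetId = Output.of("my_dataset"),
 *     params = Output.of(mapOf(
 *         "connector.authentication.oauth.clientId" to "client-id",
 *         "connector.authentication.oauth.clientSecret" to "client-secret",
 *         "connector.authentication.username" to "username",
 *         "connector.authentication.password" to "password",
 *         "connector.authentication.securityToken" to "security-token",
 *         "assets" to "[\"asset-a\",\"asset-b\"]",
 *     )),
 * )
 *
 * // Interop with the Java provider SDK, using the conversion defined on this class.
 * val javaArgs: com.pulumi.gcp.bigquery.DataTransferConfigArgs = salesforceArgs.toJava()
 * ```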
 * 
 * ## Import
 * Config can be imported using any of these accepted formats:
 * * `{{name}}`
 * When using the `pulumi import` command, Config can be imported using one of the formats above. For example:
 * ```sh
 * $ pulumi import gcp:bigquery/dataTransferConfig:DataTransferConfig default {{name}}
 * ```
 * @property dataRefreshWindowDays The number of days to look back to automatically refresh the data.
 * For example, if dataRefreshWindowDays = 10, then every day BigQuery
 * reingests data for [today-10, today-1], rather than ingesting data for
 * just [today-1]. Only valid if the data source supports the feature.
 * Set the value to 0 to use the default value.
 * @property dataSourceId The data source id. Cannot be changed once the transfer config is created.
 * @property destinationDatasetId The BigQuery target dataset id.
 * @property disabled When set to true, no runs are scheduled for a given transfer.
 * @property displayName The user specified display name for the transfer config.
 * @property emailPreferences Email notifications will be sent according to these preferences to the
 * email address of the user who owns this transfer config.
 * Structure is documented below.
 * @property location The geographic location where the transfer config should reside.
 * Examples: US, EU, asia-northeast1. The default value is US.
 * @property notificationPubsubTopic Pub/Sub topic where notifications will be sent after transfer runs
 * associated with this transfer config finish.
 * @property params Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer'
 * section for each data source. For example, the parameters for Cloud Storage transfers are listed here:
 * https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
 * **NOTE**: If you are attempting to update a parameter that cannot be updated (due to API limitations), please force recreation of the resource.
 * - - -
 * @property project The ID of the project in which the resource belongs.
 * If it is not provided, the provider project is used.
 * @property schedule Data transfer schedule. If the data source does not support a custom
 * schedule, this should be empty. If it is empty, the default value for
 * the data source will be used. The specified times are in UTC. Examples
 * of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan,
 * jun 13:15, and first sunday of quarter 00:00. See more explanation
 * about the format here:
 * https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
 * NOTE: The minimum interval time between recurring transfers depends
 * on the data source; refer to the documentation for your data source.
 * @property scheduleOptions Options customizing the data transfer schedule.
 * Structure is documented below.
 * @property sensitiveParams Different parameters are configured primarily using the `params` field on this
 * resource. This block contains the parameters which contain secrets or passwords so that they can be marked
 * sensitive and hidden from plan output. The name of the field, e.g. `secret_access_key`, will be the key
 * in the `params` map in the API request.
 * Credentials may not be specified in both locations; doing so will cause an error. Changing from one location
 * to a different credential configuration in the config will require an apply to update state.
 * Structure is documented below.
 * @property serviceAccountName Service account email. If this field is set, the transfer config will
 * be created with this service account's credentials. The user calling this API must have
 * permission to act as this service account.
 */
public data class DataTransferConfigArgs(
    public val dataRefreshWindowDays: Output<Int>? = null,
    public val dataSourceId: Output<String>? = null,
    public val destinationDatasetId: Output<String>? = null,
    public val disabled: Output<Boolean>? = null,
    public val displayName: Output<String>? = null,
    public val emailPreferences: Output<DataTransferConfigEmailPreferencesArgs>? = null,
    public val location: Output<String>? = null,
    public val notificationPubsubTopic: Output<String>? = null,
    public val params: Output<Map<String, String>>? = null,
    public val project: Output<String>? = null,
    public val schedule: Output<String>? = null,
    public val scheduleOptions: Output<DataTransferConfigScheduleOptionsArgs>? = null,
    public val sensitiveParams: Output<DataTransferConfigSensitiveParamsArgs>? = null,
    public val serviceAccountName: Output<String>? = null,
) : ConvertibleToJava<com.pulumi.gcp.bigquery.DataTransferConfigArgs> {
    override fun toJava(): com.pulumi.gcp.bigquery.DataTransferConfigArgs =
        com.pulumi.gcp.bigquery.DataTransferConfigArgs.builder()
            .dataRefreshWindowDays(dataRefreshWindowDays?.applyValue({ args0 -> args0 }))
            .dataSourceId(dataSourceId?.applyValue({ args0 -> args0 }))
            .destinationDatasetId(destinationDatasetId?.applyValue({ args0 -> args0 }))
            .disabled(disabled?.applyValue({ args0 -> args0 }))
            .displayName(displayName?.applyValue({ args0 -> args0 }))
            .emailPreferences(emailPreferences?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .location(location?.applyValue({ args0 -> args0 }))
            .notificationPubsubTopic(notificationPubsubTopic?.applyValue({ args0 -> args0 }))
            .params(params?.applyValue({ args0 -> args0.map({ args0 -> args0.key.to(args0.value) }).toMap() }))
            .project(project?.applyValue({ args0 -> args0 }))
            .schedule(schedule?.applyValue({ args0 -> args0 }))
            .scheduleOptions(scheduleOptions?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .sensitiveParams(sensitiveParams?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .serviceAccountName(serviceAccountName?.applyValue({ args0 -> args0 })).build()
}

/**
 * Builder for [DataTransferConfigArgs].
 */
@PulumiTagMarker
public class DataTransferConfigArgsBuilder internal constructor() {
    private var dataRefreshWindowDays: Output<Int>? = null

    private var dataSourceId: Output<String>? = null

    private var destinationDatasetId: Output<String>? = null

    private var disabled: Output<Boolean>? = null

    private var displayName: Output<String>? = null

    private var emailPreferences: Output<DataTransferConfigEmailPreferencesArgs>? = null

    private var location: Output<String>? = null

    private var notificationPubsubTopic: Output<String>? = null

    private var params: Output<Map<String, String>>? = null

    private var project: Output<String>? = null

    private var schedule: Output<String>? = null

    private var scheduleOptions: Output<DataTransferConfigScheduleOptionsArgs>? = null

    private var sensitiveParams: Output<DataTransferConfigSensitiveParamsArgs>? = null

    private var serviceAccountName: Output<String>? = null

    /**
     * @param value The number of days to look back to automatically refresh the data.
     * For example, if dataRefreshWindowDays = 10, then every day BigQuery
     * reingests data for [today-10, today-1], rather than ingesting data for
     * just [today-1]. Only valid if the data source supports the feature.
     * Set the value to 0 to use the default value.
     */
    @JvmName("kbqepocldqfsyvqy")
    public suspend fun dataRefreshWindowDays(`value`: Output<Int>) {
        this.dataRefreshWindowDays = value
    }

    /**
     * @param value The data source id. Cannot be changed once the transfer config is created.
     */
    @JvmName("vveclwvhonkyjjty")
    public suspend fun dataSourceId(`value`: Output<String>) {
        this.dataSourceId = value
    }

    /**
     * @param value The BigQuery target dataset id.
     */
    @JvmName("iwwcaejqlbmmuxnb")
    public suspend fun destinationDatasetId(`value`: Output<String>) {
        this.destinationDatasetId = value
    }

    /**
     * @param value When set to true, no runs are scheduled for a given transfer.
     */
    @JvmName("yktefjbnncyqafpu")
    public suspend fun disabled(`value`: Output<Boolean>) {
        this.disabled = value
    }

    /**
     * @param value The user specified display name for the transfer config.
     */
    @JvmName("xfjsckriroflqlgd")
    public suspend fun displayName(`value`: Output<String>) {
        this.displayName = value
    }

    /**
     * @param value Email notifications will be sent according to these preferences to the
     * email address of the user who owns this transfer config.
     * Structure is documented below.
     */
    @JvmName("qntbkwnfdlxqfrlq")
    public suspend fun emailPreferences(`value`: Output<DataTransferConfigEmailPreferencesArgs>) {
        this.emailPreferences = value
    }

    /**
     * @param value The geographic location where the transfer config should reside.
     * Examples: US, EU, asia-northeast1. The default value is US.
     */
    @JvmName("vspgvuuiayxwqgkr")
    public suspend fun location(`value`: Output<String>) {
        this.location = value
    }

    /**
     * @param value Pub/Sub topic where notifications will be sent after transfer runs
     * associated with this transfer config finish.
     */
    @JvmName("yuvfapsxgxnqqfbf")
    public suspend fun notificationPubsubTopic(`value`: Output<String>) {
        this.notificationPubsubTopic = value
    }

    /**
     * @param value Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer'
     * section for each data source. For example, the parameters for Cloud Storage transfers are listed here:
     * https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
     * **NOTE**: If you are attempting to update a parameter that cannot be updated (due to API limitations), please force recreation of the resource.
     * - - -
     */
    @JvmName("voipybxptrkwvlhq")
    public suspend fun params(`value`: Output<Map<String, String>>) {
        this.params = value
    }

    /**
     * @param value The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    @JvmName("skbcrlvoltimmfwo")
    public suspend fun project(`value`: Output<String>) {
        this.project = value
    }

    /**
     * @param value Data transfer schedule. If the data source does not support a custom
     * schedule, this should be empty. If it is empty, the default value for
     * the data source will be used. The specified times are in UTC. Examples
     * of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan,
     * jun 13:15, and first sunday of quarter 00:00. See more explanation
     * about the format here:
     * https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
     * NOTE: The minimum interval time between recurring transfers depends
     * on the data source; refer to the documentation for your data source.
     */
    @JvmName("msldtpmuewdulilc")
    public suspend fun schedule(`value`: Output<String>) {
        this.schedule = value
    }

    /**
     * @param value Options customizing the data transfer schedule.
     * Structure is documented below.
     */
    @JvmName("npcrcvodnvtwolna")
    public suspend fun scheduleOptions(`value`: Output<DataTransferConfigScheduleOptionsArgs>) {
        this.scheduleOptions = value
    }

    /**
     * @param value Different parameters are configured primarily using the `params` field on this
     * resource. This block contains the parameters which contain secrets or passwords so that they can be marked
     * sensitive and hidden from plan output. The name of the field, e.g. `secret_access_key`, will be the key
     * in the `params` map in the API request.
     * Credentials may not be specified in both locations; doing so will cause an error. Changing from one location
     * to a different credential configuration in the config will require an apply to update state.
     * Structure is documented below.
     */
    @JvmName("mhyktvavdkhrqwlj")
    public suspend fun sensitiveParams(`value`: Output<DataTransferConfigSensitiveParamsArgs>) {
        this.sensitiveParams = value
    }

    /**
     * @param value Service account email. If this field is set, the transfer config will
     * be created with this service account's credentials. The user calling this API must have
     * permission to act as this service account.
     */
    @JvmName("bormgxfuvpuddjvu")
    public suspend fun serviceAccountName(`value`: Output<String>) {
        this.serviceAccountName = value
    }

    /**
     * @param value The number of days to look back to automatically refresh the data.
     * For example, if dataRefreshWindowDays = 10, then every day BigQuery
     * reingests data for [today-10, today-1], rather than ingesting data for
     * just [today-1]. Only valid if the data source supports the feature.
     * Set the value to 0 to use the default value.
     */
    @JvmName("stwbsvhrkvnskvtm")
    public suspend fun dataRefreshWindowDays(`value`: Int?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.dataRefreshWindowDays = mapped
    }

    /**
     * @param value The data source id. Cannot be changed once the transfer config is created.
     */
    @JvmName("pjkldoicbrjnbfqo")
    public suspend fun dataSourceId(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.dataSourceId = mapped
    }

    /**
     * @param value The BigQuery target dataset id.
     */
    @JvmName("mcyvksoprsfhjnir")
    public suspend fun destinationDatasetId(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.destinationDatasetId = mapped
    }

    /**
     * @param value When set to true, no runs are scheduled for a given transfer.
     */
    @JvmName("nsetwohyhhhjduru")
    public suspend fun disabled(`value`: Boolean?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.disabled = mapped
    }

    /**
     * @param value The user specified display name for the transfer config.
     */
    @JvmName("kxgtcrwtdlqjddon")
    public suspend fun displayName(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.displayName = mapped
    }

    /**
     * @param value Email notifications will be sent according to these preferences to the
     * email address of the user who owns this transfer config.
     * Structure is documented below.
     */
    @JvmName("hotsdoylhlhacodt")
    public suspend fun emailPreferences(`value`: DataTransferConfigEmailPreferencesArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.emailPreferences = mapped
    }

    /**
     * @param argument Email notifications will be sent according to these preferences to the
     * email address of the user who owns this transfer config.
     * Structure is documented below.
     */
    @JvmName("vfgptqttcrecvgjt")
    public suspend fun emailPreferences(argument: suspend DataTransferConfigEmailPreferencesArgsBuilder.() -> Unit) {
        val toBeMapped = DataTransferConfigEmailPreferencesArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.emailPreferences = mapped
    }

    /**
     * @param value The geographic location where the transfer config should reside.
     * Examples: US, EU, asia-northeast1. The default value is US.
     */
    @JvmName("bwvqchtgrvabtwjo")
    public suspend fun location(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.location = mapped
    }

    /**
     * @param value Pub/Sub topic where notifications will be sent after transfer runs
     * associated with this transfer config finish.
     */
    @JvmName("mrwnwhrqwojvctlq")
    public suspend fun notificationPubsubTopic(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.notificationPubsubTopic = mapped
    }

    /**
     * @param value Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer'
     * section for each data source. For example, the parameters for Cloud Storage transfers are listed here:
     * https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
     * **NOTE**: If you are attempting to update a parameter that cannot be updated (due to API limitations), please force recreation of the resource.
     * - - -
     */
    @JvmName("vfxtwxrpvcofbmha")
    public suspend fun params(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.params = mapped
    }

    /**
     * @param values Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer'
     * section for each data source. For example, the parameters for Cloud Storage transfers are listed here:
     * https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
     * **NOTE**: If you are attempting to update a parameter that cannot be updated (due to API limitations), please force recreation of the resource.
     * - - -
     */
    @JvmName("qrsquimewqmjcglb")
    public fun params(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.params = mapped
    }

    /**
     * @param value The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    @JvmName("awomeqwcbhsoamyw")
    public suspend fun project(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.project = mapped
    }

    /**
     * @param value Data transfer schedule. If the data source does not support a custom
     * schedule, this should be empty. If it is empty, the default value for
     * the data source will be used. The specified times are in UTC. Examples
     * of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan,
     * jun 13:15, and first sunday of quarter 00:00. See more explanation
     * about the format here:
     * https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
     * NOTE: The minimum interval time between recurring transfers depends
     * on the data source; refer to the documentation for your data source.
     */
    @JvmName("abvdgqbybticoyfc")
    public suspend fun schedule(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.schedule = mapped
    }

    /**
     * @param value Options customizing the data transfer schedule.
     * Structure is documented below.
     */
    @JvmName("ntkrajwawicdahmf")
    public suspend fun scheduleOptions(`value`: DataTransferConfigScheduleOptionsArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.scheduleOptions = mapped
    }

    /**
     * @param argument Options customizing the data transfer schedule.
     * Structure is documented below.
     */
    @JvmName("rbioecfgiaoocwum")
    public suspend fun scheduleOptions(argument: suspend DataTransferConfigScheduleOptionsArgsBuilder.() -> Unit) {
        val toBeMapped = DataTransferConfigScheduleOptionsArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.scheduleOptions = mapped
    }

    /**
     * @param value Different parameters are configured primarily using the `params` field on this
     * resource. This block contains the parameters which contain secrets or passwords so that they can be marked
     * sensitive and hidden from plan output. The name of the field, e.g. `secret_access_key`, will be the key
     * in the `params` map in the API request.
     * Credentials may not be specified in both locations; doing so will cause an error. Changing from one location
     * to a different credential configuration in the config will require an apply to update state.
     * Structure is documented below.
     */
    @JvmName("kiqctfleqsqxtrly")
    public suspend fun sensitiveParams(`value`: DataTransferConfigSensitiveParamsArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.sensitiveParams = mapped
    }

    /**
     * @param argument Different parameters are configured primarily using the `params` field on this
     * resource. This block contains the parameters which contain secrets or passwords so that they can be marked
     * sensitive and hidden from plan output. The name of the field, e.g. `secret_access_key`, will be the key
     * in the `params` map in the API request.
     * Credentials may not be specified in both locations; doing so will cause an error. Changing from one location
     * to a different credential configuration in the config will require an apply to update state.
     * Structure is documented below.
     */
    @JvmName("rvniuhdvmihqwftj")
    public suspend fun sensitiveParams(argument: suspend DataTransferConfigSensitiveParamsArgsBuilder.() -> Unit) {
        val toBeMapped = DataTransferConfigSensitiveParamsArgsBuilder().applySuspend {
            argument()
        }.build()
        val mapped = of(toBeMapped)
        this.sensitiveParams = mapped
    }

    /**
     * @param value Service account email. If this field is set, the transfer config will
     * be created with this service account's credentials. The user calling this API must have
     * permission to act as this service account.
     */
    @JvmName("jrttjathrkvoujbx")
    public suspend fun serviceAccountName(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.serviceAccountName = mapped
    }

    internal fun build(): DataTransferConfigArgs = DataTransferConfigArgs(
        dataRefreshWindowDays = dataRefreshWindowDays,
        dataSourceId = dataSourceId,
        destinationDatasetId = destinationDatasetId,
        disabled = disabled,
        displayName = displayName,
        emailPreferences = emailPreferences,
        location = location,
        notificationPubsubTopic = notificationPubsubTopic,
        params = params,
        project = project,
        schedule = schedule,
        scheduleOptions = scheduleOptions,
        sensitiveParams = sensitiveParams,
        serviceAccountName = serviceAccountName,
    )
}



