com.pulumi.gcp.bigquery.kotlin.DataTransferConfigArgs.kt Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of pulumi-gcp-kotlin Show documentation
Show all versions of pulumi-gcp-kotlin Show documentation
Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.
@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.gcp.bigquery.kotlin
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.bigquery.DataTransferConfigArgs.builder
import com.pulumi.gcp.bigquery.kotlin.inputs.DataTransferConfigEmailPreferencesArgs
import com.pulumi.gcp.bigquery.kotlin.inputs.DataTransferConfigEmailPreferencesArgsBuilder
import com.pulumi.gcp.bigquery.kotlin.inputs.DataTransferConfigScheduleOptionsArgs
import com.pulumi.gcp.bigquery.kotlin.inputs.DataTransferConfigScheduleOptionsArgsBuilder
import com.pulumi.gcp.bigquery.kotlin.inputs.DataTransferConfigSensitiveParamsArgs
import com.pulumi.gcp.bigquery.kotlin.inputs.DataTransferConfigSensitiveParamsArgsBuilder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Boolean
import kotlin.Int
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.Map
import kotlin.jvm.JvmName
/**
* Represents a data transfer configuration. A transfer configuration
* contains all metadata needed to perform a data transfer.
* To get more information about Config, see:
* * [API documentation](https://cloud.google.com/bigquery/docs/reference/datatransfer/rest/v1/projects.locations.transferConfigs/create)
* * How-to Guides
* * [Official Documentation](https://cloud.google.com/bigquery/docs/reference/datatransfer/rest/)
* ## Example Usage
* ### Bigquerydatatransfer Config Scheduled Query
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
* const project = gcp.organizations.getProject({});
* const permissions = new gcp.projects.IAMMember("permissions", {
* project: project.then(project => project.projectId),
* role: "roles/iam.serviceAccountTokenCreator",
* member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com`),
* });
* const myDataset = new gcp.bigquery.Dataset("my_dataset", {
* datasetId: "my_dataset",
* friendlyName: "foo",
* description: "bar",
* location: "asia-northeast1",
* });
* const queryConfig = new gcp.bigquery.DataTransferConfig("query_config", {
* displayName: "my-query",
* location: "asia-northeast1",
* dataSourceId: "scheduled_query",
* schedule: "first sunday of quarter 00:00",
* destinationDatasetId: myDataset.datasetId,
* params: {
* destination_table_name_template: "my_table",
* write_disposition: "WRITE_APPEND",
* query: "SELECT name FROM tabl WHERE x = 'y'",
* },
* });
* ```
* ```python
* import pulumi
* import pulumi_gcp as gcp
* project = gcp.organizations.get_project()
* permissions = gcp.projects.IAMMember("permissions",
* project=project.project_id,
* role="roles/iam.serviceAccountTokenCreator",
* member=f"serviceAccount:service-{project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com")
* my_dataset = gcp.bigquery.Dataset("my_dataset",
* dataset_id="my_dataset",
* friendly_name="foo",
* description="bar",
* location="asia-northeast1")
* query_config = gcp.bigquery.DataTransferConfig("query_config",
* display_name="my-query",
* location="asia-northeast1",
* data_source_id="scheduled_query",
* schedule="first sunday of quarter 00:00",
* destination_dataset_id=my_dataset.dataset_id,
* params={
* "destination_table_name_template": "my_table",
* "write_disposition": "WRITE_APPEND",
* "query": "SELECT name FROM tabl WHERE x = 'y'",
* })
* ```
* ```csharp
* using System.Collections.Generic;
* using System.Linq;
* using Pulumi;
* using Gcp = Pulumi.Gcp;
* return await Deployment.RunAsync(() =>
* {
* var project = Gcp.Organizations.GetProject.Invoke();
* var permissions = new Gcp.Projects.IAMMember("permissions", new()
* {
* Project = project.Apply(getProjectResult => getProjectResult.ProjectId),
* Role = "roles/iam.serviceAccountTokenCreator",
* Member = $"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com",
* });
* var myDataset = new Gcp.BigQuery.Dataset("my_dataset", new()
* {
* DatasetId = "my_dataset",
* FriendlyName = "foo",
* Description = "bar",
* Location = "asia-northeast1",
* });
* var queryConfig = new Gcp.BigQuery.DataTransferConfig("query_config", new()
* {
* DisplayName = "my-query",
* Location = "asia-northeast1",
* DataSourceId = "scheduled_query",
* Schedule = "first sunday of quarter 00:00",
* DestinationDatasetId = myDataset.DatasetId,
* Params =
* {
* { "destination_table_name_template", "my_table" },
* { "write_disposition", "WRITE_APPEND" },
* { "query", "SELECT name FROM tabl WHERE x = 'y'" },
* },
* });
* });
* ```
* ```go
* package main
* import (
* "fmt"
* "github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
* "github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/organizations"
* "github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/projects"
* "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
* )
* func main() {
* pulumi.Run(func(ctx *pulumi.Context) error {
* project, err := organizations.LookupProject(ctx, nil, nil)
* if err != nil {
* return err
* }
* _, err = projects.NewIAMMember(ctx, "permissions", &projects.IAMMemberArgs{
* Project: pulumi.String(project.ProjectId),
* Role: pulumi.String("roles/iam.serviceAccountTokenCreator"),
 * Member: pulumi.String(fmt.Sprintf("serviceAccount:service-%v@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com", project.Number)),
* })
* if err != nil {
* return err
* }
* myDataset, err := bigquery.NewDataset(ctx, "my_dataset", &bigquery.DatasetArgs{
* DatasetId: pulumi.String("my_dataset"),
* FriendlyName: pulumi.String("foo"),
* Description: pulumi.String("bar"),
* Location: pulumi.String("asia-northeast1"),
* })
* if err != nil {
* return err
* }
* _, err = bigquery.NewDataTransferConfig(ctx, "query_config", &bigquery.DataTransferConfigArgs{
* DisplayName: pulumi.String("my-query"),
* Location: pulumi.String("asia-northeast1"),
* DataSourceId: pulumi.String("scheduled_query"),
* Schedule: pulumi.String("first sunday of quarter 00:00"),
* DestinationDatasetId: myDataset.DatasetId,
* Params: pulumi.StringMap{
* "destination_table_name_template": pulumi.String("my_table"),
* "write_disposition": pulumi.String("WRITE_APPEND"),
* "query": pulumi.String("SELECT name FROM tabl WHERE x = 'y'"),
* },
* })
* if err != nil {
* return err
* }
* return nil
* })
* }
* ```
* ```java
* package generated_program;
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.gcp.organizations.OrganizationsFunctions;
* import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
* import com.pulumi.gcp.projects.IAMMember;
* import com.pulumi.gcp.projects.IAMMemberArgs;
* import com.pulumi.gcp.bigquery.Dataset;
* import com.pulumi.gcp.bigquery.DatasetArgs;
* import com.pulumi.gcp.bigquery.DataTransferConfig;
* import com.pulumi.gcp.bigquery.DataTransferConfigArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
* public static void stack(Context ctx) {
* final var project = OrganizationsFunctions.getProject();
* var permissions = new IAMMember("permissions", IAMMemberArgs.builder()
* .project(project.applyValue(getProjectResult -> getProjectResult.projectId()))
* .role("roles/iam.serviceAccountTokenCreator")
 * .member(String.format("serviceAccount:service-%s@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com", project.applyValue(getProjectResult -> getProjectResult.number())))
* .build());
* var myDataset = new Dataset("myDataset", DatasetArgs.builder()
* .datasetId("my_dataset")
* .friendlyName("foo")
* .description("bar")
* .location("asia-northeast1")
* .build());
* var queryConfig = new DataTransferConfig("queryConfig", DataTransferConfigArgs.builder()
* .displayName("my-query")
* .location("asia-northeast1")
* .dataSourceId("scheduled_query")
* .schedule("first sunday of quarter 00:00")
* .destinationDatasetId(myDataset.datasetId())
* .params(Map.ofEntries(
* Map.entry("destination_table_name_template", "my_table"),
* Map.entry("write_disposition", "WRITE_APPEND"),
* Map.entry("query", "SELECT name FROM tabl WHERE x = 'y'")
* ))
* .build());
* }
* }
* ```
* ```yaml
* resources:
* permissions:
* type: gcp:projects:IAMMember
* properties:
* project: ${project.projectId}
* role: roles/iam.serviceAccountTokenCreator
* member: serviceAccount:service-${project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com
* queryConfig:
* type: gcp:bigquery:DataTransferConfig
* name: query_config
* properties:
* displayName: my-query
* location: asia-northeast1
* dataSourceId: scheduled_query
* schedule: first sunday of quarter 00:00
* destinationDatasetId: ${myDataset.datasetId}
* params:
* destination_table_name_template: my_table
* write_disposition: WRITE_APPEND
* query: SELECT name FROM tabl WHERE x = 'y'
* myDataset:
* type: gcp:bigquery:Dataset
* name: my_dataset
* properties:
* datasetId: my_dataset
* friendlyName: foo
* description: bar
* location: asia-northeast1
* variables:
* project:
* fn::invoke:
* Function: gcp:organizations:getProject
* Arguments: {}
* ```
*
* ## Import
* Config can be imported using any of these accepted formats:
* * `{{name}}`
* When using the `pulumi import` command, Config can be imported using one of the formats above. For example:
* ```sh
* $ pulumi import gcp:bigquery/dataTransferConfig:DataTransferConfig default {{name}}
* ```
* @property dataRefreshWindowDays The number of days to look back to automatically refresh the data.
* For example, if dataRefreshWindowDays = 10, then every day BigQuery
* reingests data for [today-10, today-1], rather than ingesting data for
* just [today-1]. Only valid if the data source supports the feature.
* Set the value to 0 to use the default value.
* @property dataSourceId The data source id. Cannot be changed once the transfer config is created.
* @property destinationDatasetId The BigQuery target dataset id.
* @property disabled When set to true, no runs are scheduled for a given transfer.
* @property displayName The user specified display name for the transfer config.
* @property emailPreferences Email notifications will be sent according to these preferences to the
* email address of the user who owns this transfer config.
* Structure is documented below.
* @property location The geographic location where the transfer config should reside.
* Examples: US, EU, asia-northeast1. The default value is US.
* @property notificationPubsubTopic Pub/Sub topic where notifications will be sent after transfer runs
* associated with this transfer config finish.
* @property params Parameters specific to each data source. For more information see the bq tab in the 'Setting up a data transfer'
* section for each data source. For example the parameters for Cloud Storage transfers are listed here:
* https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
* **NOTE** : If you are attempting to update a parameter that cannot be updated (due to api limitations) please force recreation of the resource.
* - - -
* @property project The ID of the project in which the resource belongs.
* If it is not provided, the provider project is used.
* @property schedule Data transfer schedule. If the data source does not support a custom
* schedule, this should be empty. If it is empty, the default value for
* the data source will be used. The specified times are in UTC. Examples
* of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan,
* jun 13:15, and first sunday of quarter 00:00. See more explanation
* about the format here:
* https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
* NOTE: the granularity should be at least 8 hours, or less frequent.
* @property scheduleOptions Options customizing the data transfer schedule.
* Structure is documented below.
 * @property sensitiveParams Different parameters are configured primarily using the `params` field on this
* resource. This block contains the parameters which contain secrets or passwords so that they can be marked
* sensitive and hidden from plan output. The name of the field, eg: secret_access_key, will be the key
* in the `params` map in the api request.
* Credentials may not be specified in both locations and will cause an error. Changing from one location
* to a different credential configuration in the config will require an apply to update state.
* Structure is documented below.
* @property serviceAccountName Service account email. If this field is set, transfer config will
* be created with this service account credentials. It requires that
* requesting user calling this API has permissions to act as this service account.
*/
public data class DataTransferConfigArgs(
public val dataRefreshWindowDays: Output? = null,
public val dataSourceId: Output? = null,
public val destinationDatasetId: Output? = null,
public val disabled: Output? = null,
public val displayName: Output? = null,
public val emailPreferences: Output? = null,
public val location: Output? = null,
public val notificationPubsubTopic: Output? = null,
public val params: Output
© 2015 - 2024 Weber Informatics LLC | Privacy Policy