@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.gcp.bigquery.kotlin

import com.pulumi.core.Output
import com.pulumi.gcp.bigquery.kotlin.outputs.DataTransferConfigEmailPreferences
import com.pulumi.gcp.bigquery.kotlin.outputs.DataTransferConfigScheduleOptions
import com.pulumi.gcp.bigquery.kotlin.outputs.DataTransferConfigSensitiveParams
import com.pulumi.kotlin.KotlinCustomResource
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.ResourceMapper
import com.pulumi.kotlin.options.CustomResourceOptions
import com.pulumi.kotlin.options.CustomResourceOptionsBuilder
import com.pulumi.resources.Resource
import kotlin.Boolean
import kotlin.Int
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.Map
import com.pulumi.gcp.bigquery.kotlin.outputs.DataTransferConfigEmailPreferences.Companion.toKotlin as dataTransferConfigEmailPreferencesToKotlin
import com.pulumi.gcp.bigquery.kotlin.outputs.DataTransferConfigScheduleOptions.Companion.toKotlin as dataTransferConfigScheduleOptionsToKotlin
import com.pulumi.gcp.bigquery.kotlin.outputs.DataTransferConfigSensitiveParams.Companion.toKotlin as dataTransferConfigSensitiveParamsToKotlin

/**
 * Builder for [DataTransferConfig].
 */
@PulumiTagMarker
public class DataTransferConfigResourceBuilder internal constructor() {
    public var name: String? = null

    public var args: DataTransferConfigArgs = DataTransferConfigArgs()

    public var opts: CustomResourceOptions = CustomResourceOptions()

    /**
     * @param name The _unique_ name of the resulting resource.
     */
    public fun name(`value`: String) {
        this.name = value
    }

    /**
     * @param block The arguments to use to populate this resource's properties.
     */
    public suspend fun args(block: suspend DataTransferConfigArgsBuilder.() -> Unit) {
        val builder = DataTransferConfigArgsBuilder()
        block(builder)
        this.args = builder.build()
    }

    /**
     * @param block A bag of options that control this resource's behavior.
     */
    public suspend fun opts(block: suspend CustomResourceOptionsBuilder.() -> Unit) {
        this.opts = com.pulumi.kotlin.options.CustomResourceOptions.opts(block)
    }

    internal fun build(): DataTransferConfig {
        val builtJavaResource = com.pulumi.gcp.bigquery.DataTransferConfig(
            this.name,
            this.args.toJava(),
            this.opts.toJava(),
        )
        return DataTransferConfig(builtJavaResource)
    }
}

/**
 * Represents a data transfer configuration. A transfer configuration
 * contains all metadata needed to perform a data transfer.
 * To get more information about Config, see:
 * * [API documentation](https://cloud.google.com/bigquery/docs/reference/datatransfer/rest/v1/projects.locations.transferConfigs/create)
 * * How-to Guides
 *     * [Official Documentation](https://cloud.google.com/bigquery/docs/reference/datatransfer/rest/)
 * ## Example Usage
 * ### Bigquerydatatransfer Config Scheduled Query
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const project = gcp.organizations.getProject({});
 * const permissions = new gcp.projects.IAMMember("permissions", {
 *     project: project.then(project => project.projectId),
 *     role: "roles/iam.serviceAccountTokenCreator",
 *     member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com`),
 * });
 * const myDataset = new gcp.bigquery.Dataset("my_dataset", {
 *     datasetId: "my_dataset",
 *     friendlyName: "foo",
 *     description: "bar",
 *     location: "asia-northeast1",
 * });
 * const queryConfig = new gcp.bigquery.DataTransferConfig("query_config", {
 *     displayName: "my-query",
 *     location: "asia-northeast1",
 *     dataSourceId: "scheduled_query",
 *     schedule: "first sunday of quarter 00:00",
 *     destinationDatasetId: myDataset.datasetId,
 *     params: {
 *         destination_table_name_template: "my_table",
 *         write_disposition: "WRITE_APPEND",
 *         query: "SELECT name FROM tabl WHERE x = 'y'",
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * project = gcp.organizations.get_project()
 * permissions = gcp.projects.IAMMember("permissions",
 *     project=project.project_id,
 *     role="roles/iam.serviceAccountTokenCreator",
 *     member=f"serviceAccount:service-{project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com")
 * my_dataset = gcp.bigquery.Dataset("my_dataset",
 *     dataset_id="my_dataset",
 *     friendly_name="foo",
 *     description="bar",
 *     location="asia-northeast1")
 * query_config = gcp.bigquery.DataTransferConfig("query_config",
 *     display_name="my-query",
 *     location="asia-northeast1",
 *     data_source_id="scheduled_query",
 *     schedule="first sunday of quarter 00:00",
 *     destination_dataset_id=my_dataset.dataset_id,
 *     params={
 *         "destination_table_name_template": "my_table",
 *         "write_disposition": "WRITE_APPEND",
 *         "query": "SELECT name FROM tabl WHERE x = 'y'",
 *     })
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var project = Gcp.Organizations.GetProject.Invoke();
 *     var permissions = new Gcp.Projects.IAMMember("permissions", new()
 *     {
 *         Project = project.Apply(getProjectResult => getProjectResult.ProjectId),
 *         Role = "roles/iam.serviceAccountTokenCreator",
 *         Member = $"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com",
 *     });
 *     var myDataset = new Gcp.BigQuery.Dataset("my_dataset", new()
 *     {
 *         DatasetId = "my_dataset",
 *         FriendlyName = "foo",
 *         Description = "bar",
 *         Location = "asia-northeast1",
 *     });
 *     var queryConfig = new Gcp.BigQuery.DataTransferConfig("query_config", new()
 *     {
 *         DisplayName = "my-query",
 *         Location = "asia-northeast1",
 *         DataSourceId = "scheduled_query",
 *         Schedule = "first sunday of quarter 00:00",
 *         DestinationDatasetId = myDataset.DatasetId,
 *         Params =
 *         {
 *             { "destination_table_name_template", "my_table" },
 *             { "write_disposition", "WRITE_APPEND" },
 *             { "query", "SELECT name FROM tabl WHERE x = 'y'" },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"fmt"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/organizations"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/projects"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		project, err := organizations.LookupProject(ctx, nil, nil)
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = projects.NewIAMMember(ctx, "permissions", &projects.IAMMemberArgs{
 * 			Project: pulumi.String(project.ProjectId),
 * 			Role:    pulumi.String("roles/iam.serviceAccountTokenCreator"),
 * 			Member:  pulumi.String(fmt.Sprintf("serviceAccount:service-%v@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com", project.Number)),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		myDataset, err := bigquery.NewDataset(ctx, "my_dataset", &bigquery.DatasetArgs{
 * 			DatasetId:    pulumi.String("my_dataset"),
 * 			FriendlyName: pulumi.String("foo"),
 * 			Description:  pulumi.String("bar"),
 * 			Location:     pulumi.String("asia-northeast1"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = bigquery.NewDataTransferConfig(ctx, "query_config", &bigquery.DataTransferConfigArgs{
 * 			DisplayName:          pulumi.String("my-query"),
 * 			Location:             pulumi.String("asia-northeast1"),
 * 			DataSourceId:         pulumi.String("scheduled_query"),
 * 			Schedule:             pulumi.String("first sunday of quarter 00:00"),
 * 			DestinationDatasetId: myDataset.DatasetId,
 * 			Params: pulumi.StringMap{
 * 				"destination_table_name_template": pulumi.String("my_table"),
 * 				"write_disposition":               pulumi.String("WRITE_APPEND"),
 * 				"query":                           pulumi.String("SELECT name FROM tabl WHERE x = 'y'"),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.organizations.OrganizationsFunctions;
 * import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
 * import com.pulumi.gcp.projects.IAMMember;
 * import com.pulumi.gcp.projects.IAMMemberArgs;
 * import com.pulumi.gcp.bigquery.Dataset;
 * import com.pulumi.gcp.bigquery.DatasetArgs;
 * import com.pulumi.gcp.bigquery.DataTransferConfig;
 * import com.pulumi.gcp.bigquery.DataTransferConfigArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         final var project = OrganizationsFunctions.getProject();
 *         var permissions = new IAMMember("permissions", IAMMemberArgs.builder()
 *             .project(project.applyValue(getProjectResult -> getProjectResult.projectId()))
 *             .role("roles/iam.serviceAccountTokenCreator")
 *             .member(String.format("serviceAccount:service-%s@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com", project.applyValue(getProjectResult -> getProjectResult.number())))
 *             .build());
 *         var myDataset = new Dataset("myDataset", DatasetArgs.builder()
 *             .datasetId("my_dataset")
 *             .friendlyName("foo")
 *             .description("bar")
 *             .location("asia-northeast1")
 *             .build());
 *         var queryConfig = new DataTransferConfig("queryConfig", DataTransferConfigArgs.builder()
 *             .displayName("my-query")
 *             .location("asia-northeast1")
 *             .dataSourceId("scheduled_query")
 *             .schedule("first sunday of quarter 00:00")
 *             .destinationDatasetId(myDataset.datasetId())
 *             .params(Map.ofEntries(
 *                 Map.entry("destination_table_name_template", "my_table"),
 *                 Map.entry("write_disposition", "WRITE_APPEND"),
 *                 Map.entry("query", "SELECT name FROM tabl WHERE x = 'y'")
 *             ))
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   permissions:
 *     type: gcp:projects:IAMMember
 *     properties:
 *       project: ${project.projectId}
 *       role: roles/iam.serviceAccountTokenCreator
 *       member: serviceAccount:service-${project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com
 *   queryConfig:
 *     type: gcp:bigquery:DataTransferConfig
 *     name: query_config
 *     properties:
 *       displayName: my-query
 *       location: asia-northeast1
 *       dataSourceId: scheduled_query
 *       schedule: first sunday of quarter 00:00
 *       destinationDatasetId: ${myDataset.datasetId}
 *       params:
 *         destination_table_name_template: my_table
 *         write_disposition: WRITE_APPEND
 *         query: SELECT name FROM tabl WHERE x = 'y'
 *   myDataset:
 *     type: gcp:bigquery:Dataset
 *     name: my_dataset
 *     properties:
 *       datasetId: my_dataset
 *       friendlyName: foo
 *       description: bar
 *       location: asia-northeast1
 * variables:
 *   project:
 *     fn::invoke:
 *       Function: gcp:organizations:getProject
 *       Arguments: {}
 * ```
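 * 
 * The following Kotlin sketch shows the same scheduled-query configuration using this
 * SDK's `dataset` and `dataTransferConfig` builder functions. It is illustrative only:
 * it assumes the Pulumi Kotlin program entry point `com.pulumi.kotlin.Pulumi.run` and
 * its context `export`, and it elides the IAM binding shown in the other languages.
 * ```kotlin
 * import com.pulumi.gcp.bigquery.kotlin.dataTransferConfig
 * import com.pulumi.gcp.bigquery.kotlin.dataset
 * import com.pulumi.kotlin.Pulumi
 * fun main() {
 *     Pulumi.run { ctx ->
 *         // Target dataset that the scheduled query writes into.
 *         val myDataset = dataset("my_dataset") {
 *             args {
 *                 datasetId("my_dataset")
 *                 friendlyName("foo")
 *                 description("bar")
 *                 location("asia-northeast1")
 *             }
 *         }
 *         // Scheduled-query transfer config targeting the dataset above.
 *         val queryConfig = dataTransferConfig("query_config") {
 *             args {
 *                 displayName("my-query")
 *                 location("asia-northeast1")
 *                 dataSourceId("scheduled_query")
 *                 schedule("first sunday of quarter 00:00")
 *                 destinationDatasetId(myDataset.datasetId)
 *                 params(
 *                     mapOf(
 *                         "destination_table_name_template" to "my_table",
 *                         "write_disposition" to "WRITE_APPEND",
 *                         "query" to "SELECT name FROM tabl WHERE x = 'y'",
 *                     ),
 *                 )
 *             }
 *         }
 *         ctx.export("configName", queryConfig.name)
 *     }
 * }
 * ```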
 * 
 * ## Import
 * Config can be imported using any of these accepted formats:
 * * `{{name}}`
 * When using the `pulumi import` command, Config can be imported using one of the formats above. For example:
 * ```sh
 * $ pulumi import gcp:bigquery/dataTransferConfig:DataTransferConfig default {{name}}
 * ```
 */
public class DataTransferConfig internal constructor(
    override val javaResource: com.pulumi.gcp.bigquery.DataTransferConfig,
) : KotlinCustomResource(javaResource, DataTransferConfigMapper) {
    /**
     * The number of days to look back to automatically refresh the data.
     * For example, if dataRefreshWindowDays = 10, then every day BigQuery
     * reingests data for [today-10, today-1], rather than ingesting data for
     * just [today-1]. Only valid if the data source supports the feature.
     * Set the value to 0 to use the default value.
     */
    public val dataRefreshWindowDays: Output<Int>?
        get() = javaResource.dataRefreshWindowDays().applyValue({ args0 ->
            args0.map({ args0 ->
                args0
            }).orElse(null)
        })

    /**
     * The data source id. Cannot be changed once the transfer config is created.
     */
    public val dataSourceId: Output<String>
        get() = javaResource.dataSourceId().applyValue({ args0 -> args0 })

    /**
     * The BigQuery target dataset id.
     */
    public val destinationDatasetId: Output<String>?
        get() = javaResource.destinationDatasetId().applyValue({ args0 ->
            args0.map({ args0 ->
                args0
            }).orElse(null)
        })

    /**
     * When set to true, no runs are scheduled for a given transfer.
     */
    public val disabled: Output<Boolean>?
        get() = javaResource.disabled().applyValue({ args0 -> args0.map({ args0 -> args0 }).orElse(null) })

    /**
     * The user specified display name for the transfer config.
     */
    public val displayName: Output<String>
        get() = javaResource.displayName().applyValue({ args0 -> args0 })

    /**
     * Email notifications will be sent according to these preferences to the
     * email address of the user who owns this transfer config.
     * Structure is documented below.
     */
    public val emailPreferences: Output<DataTransferConfigEmailPreferences>?
        get() = javaResource.emailPreferences().applyValue({ args0 ->
            args0.map({ args0 ->
                args0.let({ args0 -> dataTransferConfigEmailPreferencesToKotlin(args0) })
            }).orElse(null)
        })

    /**
     * The geographic location where the transfer config should reside.
     * Examples: US, EU, asia-northeast1. The default value is US.
     */
    public val location: Output<String>?
        get() = javaResource.location().applyValue({ args0 -> args0.map({ args0 -> args0 }).orElse(null) })

    /**
     * The resource name of the transfer config. Transfer config names have the
     * form projects/{projectId}/locations/{location}/transferConfigs/{configId}
     * or projects/{projectId}/transferConfigs/{configId},
     * where configId is usually a uuid, but this is not required.
     * The name is ignored when creating a transfer config.
     */
    public val name: Output<String>
        get() = javaResource.name().applyValue({ args0 -> args0 })

    /**
     * Pub/Sub topic where notifications will be sent after transfer runs
     * associated with this transfer config finish.
     */
    public val notificationPubsubTopic: Output<String>?
        get() = javaResource.notificationPubsubTopic().applyValue({ args0 ->
            args0.map({ args0 ->
                args0
            }).orElse(null)
        })

    /**
     * Parameters specific to each data source. For more information, see the bq tab in the
     * 'Setting up a data transfer' section for each data source. For example, the parameters
     * for Cloud Storage transfers are listed here:
     * https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
     * **NOTE**: If you are attempting to update a parameter that cannot be updated (due to
     * API limitations), force recreation of the resource.
     * - - -
     */
    public val params: Output<Map<String, String>>
        get() = javaResource.params().applyValue({ args0 ->
            args0.map({ args0 ->
                args0.key.to(args0.value)
            }).toMap()
        })

    /**
     * The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    public val project: Output<String>
        get() = javaResource.project().applyValue({ args0 -> args0 })

    /**
     * Data transfer schedule. If the data source does not support a custom
     * schedule, this should be empty. If it is empty, the default value for
     * the data source will be used. The specified times are in UTC. Examples
     * of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan,
     * jun 13:15, and first sunday of quarter 00:00. See more explanation
     * about the format here:
     * https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
     * NOTE: the granularity should be at least 8 hours, or less frequent.
     */
    public val schedule: Output<String>?
        get() = javaResource.schedule().applyValue({ args0 -> args0.map({ args0 -> args0 }).orElse(null) })

    /**
     * Options customizing the data transfer schedule.
     * Structure is documented below.
     */
    public val scheduleOptions: Output<DataTransferConfigScheduleOptions>?
        get() = javaResource.scheduleOptions().applyValue({ args0 ->
            args0.map({ args0 ->
                args0.let({ args0 -> dataTransferConfigScheduleOptionsToKotlin(args0) })
            }).orElse(null)
        })

    /**
     * Parameters are configured primarily using the `params` field on this resource. This
     * block contains the parameters that hold secrets or passwords, so that they can be
     * marked sensitive and hidden from plan output. The name of each field, e.g.
     * `secret_access_key`, becomes the key in the `params` map in the API request.
     * Specifying credentials in both locations causes an error. Changing from one credential
     * location to the other in the config requires an apply to update state.
     * Structure is documented below.
     */
    public val sensitiveParams: Output<DataTransferConfigSensitiveParams>?
        get() = javaResource.sensitiveParams().applyValue({ args0 ->
            args0.map({ args0 ->
                args0.let({ args0 -> dataTransferConfigSensitiveParamsToKotlin(args0) })
            }).orElse(null)
        })

    /**
     * Service account email. If this field is set, the transfer config will be
     * created with this service account's credentials. The user calling this API
     * must have permission to act as this service account.
     */
    public val serviceAccountName: Output<String>?
        get() = javaResource.serviceAccountName().applyValue({ args0 ->
            args0.map({ args0 ->
                args0
            }).orElse(null)
        })
}

public object DataTransferConfigMapper : ResourceMapper<DataTransferConfig> {
    override fun supportsMappingOfType(javaResource: Resource): Boolean =
        com.pulumi.gcp.bigquery.DataTransferConfig::class == javaResource::class

    override fun map(javaResource: Resource): DataTransferConfig = DataTransferConfig(
        javaResource as
            com.pulumi.gcp.bigquery.DataTransferConfig,
    )
}

/**
 * @see [DataTransferConfig].
 * @param name The _unique_ name of the resulting resource.
 * @param block Builder for [DataTransferConfig].
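 *
 * A minimal usage sketch (values are illustrative; assumes a surrounding Pulumi
 * Kotlin program scope and that the options builder exposes `protect`):
 * ```kotlin
 * val config = dataTransferConfig("query-config") {
 *     args {
 *         dataSourceId("scheduled_query")
 *         displayName("my-query")
 *         params(mapOf("query" to "SELECT 1"))
 *     }
 *     opts {
 *         protect(true) // assumed standard Pulumi resource option
 *     }
 * }
 * ```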
 */
public suspend fun dataTransferConfig(
    name: String,
    block: suspend DataTransferConfigResourceBuilder.() -> Unit,
): DataTransferConfig {
    val builder = DataTransferConfigResourceBuilder()
    builder.name(name)
    block(builder)
    return builder.build()
}

/**
 * @see [DataTransferConfig].
 * @param name The _unique_ name of the resulting resource.
 */
public fun dataTransferConfig(name: String): DataTransferConfig {
    val builder = DataTransferConfigResourceBuilder()
    builder.name(name)
    return builder.build()
}



