All downloads are free. The search and download functionality uses the official Maven repository.

com.pulumi.gcp.bigquery.kotlin.DatasetArgs.kt Maven / Gradle / Ivy

Go to download

Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.

There is a newer version: 8.10.0.0
Show newest version
@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.gcp.bigquery.kotlin

import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.bigquery.DatasetArgs.builder
import com.pulumi.gcp.bigquery.kotlin.inputs.DatasetAccessArgs
import com.pulumi.gcp.bigquery.kotlin.inputs.DatasetAccessArgsBuilder
import com.pulumi.gcp.bigquery.kotlin.inputs.DatasetDefaultEncryptionConfigurationArgs
import com.pulumi.gcp.bigquery.kotlin.inputs.DatasetDefaultEncryptionConfigurationArgsBuilder
import com.pulumi.gcp.bigquery.kotlin.inputs.DatasetExternalDatasetReferenceArgs
import com.pulumi.gcp.bigquery.kotlin.inputs.DatasetExternalDatasetReferenceArgsBuilder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.Boolean
import kotlin.Int
import kotlin.Pair
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.collections.Map
import kotlin.jvm.JvmName

/**
 * ## Example Usage
 * ### Bigquery Dataset Basic
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const bqowner = new gcp.serviceaccount.Account("bqowner", {accountId: "bqowner"});
 * const dataset = new gcp.bigquery.Dataset("dataset", {
 *     datasetId: "example_dataset",
 *     friendlyName: "test",
 *     description: "This is a test description",
 *     location: "EU",
 *     defaultTableExpirationMs: 3600000,
 *     labels: {
 *         env: "default",
 *     },
 *     accesses: [
 *         {
 *             role: "OWNER",
 *             userByEmail: bqowner.email,
 *         },
 *         {
 *             role: "READER",
 *             domain: "hashicorp.com",
 *         },
 *     ],
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * bqowner = gcp.serviceaccount.Account("bqowner", account_id="bqowner")
 * dataset = gcp.bigquery.Dataset("dataset",
 *     dataset_id="example_dataset",
 *     friendly_name="test",
 *     description="This is a test description",
 *     location="EU",
 *     default_table_expiration_ms=3600000,
 *     labels={
 *         "env": "default",
 *     },
 *     accesses=[
 *         gcp.bigquery.DatasetAccessArgs(
 *             role="OWNER",
 *             user_by_email=bqowner.email,
 *         ),
 *         gcp.bigquery.DatasetAccessArgs(
 *             role="READER",
 *             domain="hashicorp.com",
 *         ),
 *     ])
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var bqowner = new Gcp.ServiceAccount.Account("bqowner", new()
 *     {
 *         AccountId = "bqowner",
 *     });
 *     var dataset = new Gcp.BigQuery.Dataset("dataset", new()
 *     {
 *         DatasetId = "example_dataset",
 *         FriendlyName = "test",
 *         Description = "This is a test description",
 *         Location = "EU",
 *         DefaultTableExpirationMs = 3600000,
 *         Labels =
 *         {
 *             { "env", "default" },
 *         },
 *         Accesses = new[]
 *         {
 *             new Gcp.BigQuery.Inputs.DatasetAccessArgs
 *             {
 *                 Role = "OWNER",
 *                 UserByEmail = bqowner.Email,
 *             },
 *             new Gcp.BigQuery.Inputs.DatasetAccessArgs
 *             {
 *                 Role = "READER",
 *                 Domain = "hashicorp.com",
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/serviceaccount"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		bqowner, err := serviceaccount.NewAccount(ctx, "bqowner", &serviceaccount.AccountArgs{
 * 			AccountId: pulumi.String("bqowner"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
 * 			DatasetId:                pulumi.String("example_dataset"),
 * 			FriendlyName:             pulumi.String("test"),
 * 			Description:              pulumi.String("This is a test description"),
 * 			Location:                 pulumi.String("EU"),
 * 			DefaultTableExpirationMs: pulumi.Int(3600000),
 * 			Labels: pulumi.StringMap{
 * 				"env": pulumi.String("default"),
 * 			},
 * 			Accesses: bigquery.DatasetAccessTypeArray{
 * 				&bigquery.DatasetAccessTypeArgs{
 * 					Role:        pulumi.String("OWNER"),
 * 					UserByEmail: bqowner.Email,
 * 				},
 * 				&bigquery.DatasetAccessTypeArgs{
 * 					Role:   pulumi.String("READER"),
 * 					Domain: pulumi.String("hashicorp.com"),
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.serviceaccount.Account;
 * import com.pulumi.gcp.serviceaccount.AccountArgs;
 * import com.pulumi.gcp.bigquery.Dataset;
 * import com.pulumi.gcp.bigquery.DatasetArgs;
 * import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var bqowner = new Account("bqowner", AccountArgs.builder()
 *             .accountId("bqowner")
 *             .build());
 *         var dataset = new Dataset("dataset", DatasetArgs.builder()
 *             .datasetId("example_dataset")
 *             .friendlyName("test")
 *             .description("This is a test description")
 *             .location("EU")
 *             .defaultTableExpirationMs(3600000)
 *             .labels(Map.of("env", "default"))
 *             .accesses(
 *                 DatasetAccessArgs.builder()
 *                     .role("OWNER")
 *                     .userByEmail(bqowner.email())
 *                     .build(),
 *                 DatasetAccessArgs.builder()
 *                     .role("READER")
 *                     .domain("hashicorp.com")
 *                     .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   dataset:
 *     type: gcp:bigquery:Dataset
 *     properties:
 *       datasetId: example_dataset
 *       friendlyName: test
 *       description: This is a test description
 *       location: EU
 *       defaultTableExpirationMs: 3.6e+06
 *       labels:
 *         env: default
 *       accesses:
 *         - role: OWNER
 *           userByEmail: ${bqowner.email}
 *         - role: READER
 *           domain: hashicorp.com
 *   bqowner:
 *     type: gcp:serviceaccount:Account
 *     properties:
 *       accountId: bqowner
 * ```
 * 
 * ### Bigquery Dataset Cmek
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const keyRing = new gcp.kms.KeyRing("key_ring", {
 *     name: "example-keyring",
 *     location: "us",
 * });
 * const cryptoKey = new gcp.kms.CryptoKey("crypto_key", {
 *     name: "example-key",
 *     keyRing: keyRing.id,
 * });
 * const dataset = new gcp.bigquery.Dataset("dataset", {
 *     datasetId: "example_dataset",
 *     friendlyName: "test",
 *     description: "This is a test description",
 *     location: "US",
 *     defaultTableExpirationMs: 3600000,
 *     defaultEncryptionConfiguration: {
 *         kmsKeyName: cryptoKey.id,
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * key_ring = gcp.kms.KeyRing("key_ring",
 *     name="example-keyring",
 *     location="us")
 * crypto_key = gcp.kms.CryptoKey("crypto_key",
 *     name="example-key",
 *     key_ring=key_ring.id)
 * dataset = gcp.bigquery.Dataset("dataset",
 *     dataset_id="example_dataset",
 *     friendly_name="test",
 *     description="This is a test description",
 *     location="US",
 *     default_table_expiration_ms=3600000,
 *     default_encryption_configuration=gcp.bigquery.DatasetDefaultEncryptionConfigurationArgs(
 *         kms_key_name=crypto_key.id,
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var keyRing = new Gcp.Kms.KeyRing("key_ring", new()
 *     {
 *         Name = "example-keyring",
 *         Location = "us",
 *     });
 *     var cryptoKey = new Gcp.Kms.CryptoKey("crypto_key", new()
 *     {
 *         Name = "example-key",
 *         KeyRing = keyRing.Id,
 *     });
 *     var dataset = new Gcp.BigQuery.Dataset("dataset", new()
 *     {
 *         DatasetId = "example_dataset",
 *         FriendlyName = "test",
 *         Description = "This is a test description",
 *         Location = "US",
 *         DefaultTableExpirationMs = 3600000,
 *         DefaultEncryptionConfiguration = new Gcp.BigQuery.Inputs.DatasetDefaultEncryptionConfigurationArgs
 *         {
 *             KmsKeyName = cryptoKey.Id,
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/kms"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		keyRing, err := kms.NewKeyRing(ctx, "key_ring", &kms.KeyRingArgs{
 * 			Name:     pulumi.String("example-keyring"),
 * 			Location: pulumi.String("us"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		cryptoKey, err := kms.NewCryptoKey(ctx, "crypto_key", &kms.CryptoKeyArgs{
 * 			Name:    pulumi.String("example-key"),
 * 			KeyRing: keyRing.ID(),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
 * 			DatasetId:                pulumi.String("example_dataset"),
 * 			FriendlyName:             pulumi.String("test"),
 * 			Description:              pulumi.String("This is a test description"),
 * 			Location:                 pulumi.String("US"),
 * 			DefaultTableExpirationMs: pulumi.Int(3600000),
 * 			DefaultEncryptionConfiguration: &bigquery.DatasetDefaultEncryptionConfigurationArgs{
 * 				KmsKeyName: cryptoKey.ID(),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.kms.KeyRing;
 * import com.pulumi.gcp.kms.KeyRingArgs;
 * import com.pulumi.gcp.kms.CryptoKey;
 * import com.pulumi.gcp.kms.CryptoKeyArgs;
 * import com.pulumi.gcp.bigquery.Dataset;
 * import com.pulumi.gcp.bigquery.DatasetArgs;
 * import com.pulumi.gcp.bigquery.inputs.DatasetDefaultEncryptionConfigurationArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var keyRing = new KeyRing("keyRing", KeyRingArgs.builder()
 *             .name("example-keyring")
 *             .location("us")
 *             .build());
 *         var cryptoKey = new CryptoKey("cryptoKey", CryptoKeyArgs.builder()
 *             .name("example-key")
 *             .keyRing(keyRing.id())
 *             .build());
 *         var dataset = new Dataset("dataset", DatasetArgs.builder()
 *             .datasetId("example_dataset")
 *             .friendlyName("test")
 *             .description("This is a test description")
 *             .location("US")
 *             .defaultTableExpirationMs(3600000)
 *             .defaultEncryptionConfiguration(DatasetDefaultEncryptionConfigurationArgs.builder()
 *                 .kmsKeyName(cryptoKey.id())
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   dataset:
 *     type: gcp:bigquery:Dataset
 *     properties:
 *       datasetId: example_dataset
 *       friendlyName: test
 *       description: This is a test description
 *       location: US
 *       defaultTableExpirationMs: 3.6e+06
 *       defaultEncryptionConfiguration:
 *         kmsKeyName: ${cryptoKey.id}
 *   cryptoKey:
 *     type: gcp:kms:CryptoKey
 *     name: crypto_key
 *     properties:
 *       name: example-key
 *       keyRing: ${keyRing.id}
 *   keyRing:
 *     type: gcp:kms:KeyRing
 *     name: key_ring
 *     properties:
 *       name: example-keyring
 *       location: us
 * ```
 * 
 * ### Bigquery Dataset Authorized Dataset
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const bqowner = new gcp.serviceaccount.Account("bqowner", {accountId: "bqowner"});
 * const _public = new gcp.bigquery.Dataset("public", {
 *     datasetId: "public",
 *     friendlyName: "test",
 *     description: "This dataset is public",
 *     location: "EU",
 *     defaultTableExpirationMs: 3600000,
 *     labels: {
 *         env: "default",
 *     },
 *     accesses: [
 *         {
 *             role: "OWNER",
 *             userByEmail: bqowner.email,
 *         },
 *         {
 *             role: "READER",
 *             domain: "hashicorp.com",
 *         },
 *     ],
 * });
 * const dataset = new gcp.bigquery.Dataset("dataset", {
 *     datasetId: "private",
 *     friendlyName: "test",
 *     description: "This dataset is private",
 *     location: "EU",
 *     defaultTableExpirationMs: 3600000,
 *     labels: {
 *         env: "default",
 *     },
 *     accesses: [
 *         {
 *             role: "OWNER",
 *             userByEmail: bqowner.email,
 *         },
 *         {
 *             role: "READER",
 *             domain: "hashicorp.com",
 *         },
 *         {
 *             dataset: {
 *                 dataset: {
 *                     projectId: _public.project,
 *                     datasetId: _public.datasetId,
 *                 },
 *                 targetTypes: ["VIEWS"],
 *             },
 *         },
 *     ],
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * bqowner = gcp.serviceaccount.Account("bqowner", account_id="bqowner")
 * public = gcp.bigquery.Dataset("public",
 *     dataset_id="public",
 *     friendly_name="test",
 *     description="This dataset is public",
 *     location="EU",
 *     default_table_expiration_ms=3600000,
 *     labels={
 *         "env": "default",
 *     },
 *     accesses=[
 *         gcp.bigquery.DatasetAccessArgs(
 *             role="OWNER",
 *             user_by_email=bqowner.email,
 *         ),
 *         gcp.bigquery.DatasetAccessArgs(
 *             role="READER",
 *             domain="hashicorp.com",
 *         ),
 *     ])
 * dataset = gcp.bigquery.Dataset("dataset",
 *     dataset_id="private",
 *     friendly_name="test",
 *     description="This dataset is private",
 *     location="EU",
 *     default_table_expiration_ms=3600000,
 *     labels={
 *         "env": "default",
 *     },
 *     accesses=[
 *         gcp.bigquery.DatasetAccessArgs(
 *             role="OWNER",
 *             user_by_email=bqowner.email,
 *         ),
 *         gcp.bigquery.DatasetAccessArgs(
 *             role="READER",
 *             domain="hashicorp.com",
 *         ),
 *         gcp.bigquery.DatasetAccessArgs(
 *             dataset=gcp.bigquery.DatasetAccessDatasetArgs(
 *                 dataset=gcp.bigquery.DatasetAccessDatasetDatasetArgs(
 *                     project_id=public.project,
 *                     dataset_id=public.dataset_id,
 *                 ),
 *                 target_types=["VIEWS"],
 *             ),
 *         ),
 *     ])
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var bqowner = new Gcp.ServiceAccount.Account("bqowner", new()
 *     {
 *         AccountId = "bqowner",
 *     });
 *     var @public = new Gcp.BigQuery.Dataset("public", new()
 *     {
 *         DatasetId = "public",
 *         FriendlyName = "test",
 *         Description = "This dataset is public",
 *         Location = "EU",
 *         DefaultTableExpirationMs = 3600000,
 *         Labels =
 *         {
 *             { "env", "default" },
 *         },
 *         Accesses = new[]
 *         {
 *             new Gcp.BigQuery.Inputs.DatasetAccessArgs
 *             {
 *                 Role = "OWNER",
 *                 UserByEmail = bqowner.Email,
 *             },
 *             new Gcp.BigQuery.Inputs.DatasetAccessArgs
 *             {
 *                 Role = "READER",
 *                 Domain = "hashicorp.com",
 *             },
 *         },
 *     });
 *     var dataset = new Gcp.BigQuery.Dataset("dataset", new()
 *     {
 *         DatasetId = "private",
 *         FriendlyName = "test",
 *         Description = "This dataset is private",
 *         Location = "EU",
 *         DefaultTableExpirationMs = 3600000,
 *         Labels =
 *         {
 *             { "env", "default" },
 *         },
 *         Accesses = new[]
 *         {
 *             new Gcp.BigQuery.Inputs.DatasetAccessArgs
 *             {
 *                 Role = "OWNER",
 *                 UserByEmail = bqowner.Email,
 *             },
 *             new Gcp.BigQuery.Inputs.DatasetAccessArgs
 *             {
 *                 Role = "READER",
 *                 Domain = "hashicorp.com",
 *             },
 *             new Gcp.BigQuery.Inputs.DatasetAccessArgs
 *             {
 *                 Dataset = new Gcp.BigQuery.Inputs.DatasetAccessDatasetArgs
 *                 {
 *                     Dataset = new Gcp.BigQuery.Inputs.DatasetAccessDatasetDatasetArgs
 *                     {
 *                         ProjectId = @public.Project,
 *                         DatasetId = @public.DatasetId,
 *                     },
 *                     TargetTypes = new[]
 *                     {
 *                         "VIEWS",
 *                     },
 *                 },
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/serviceaccount"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		bqowner, err := serviceaccount.NewAccount(ctx, "bqowner", &serviceaccount.AccountArgs{
 * 			AccountId: pulumi.String("bqowner"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		public, err := bigquery.NewDataset(ctx, "public", &bigquery.DatasetArgs{
 * 			DatasetId:                pulumi.String("public"),
 * 			FriendlyName:             pulumi.String("test"),
 * 			Description:              pulumi.String("This dataset is public"),
 * 			Location:                 pulumi.String("EU"),
 * 			DefaultTableExpirationMs: pulumi.Int(3600000),
 * 			Labels: pulumi.StringMap{
 * 				"env": pulumi.String("default"),
 * 			},
 * 			Accesses: bigquery.DatasetAccessTypeArray{
 * 				&bigquery.DatasetAccessTypeArgs{
 * 					Role:        pulumi.String("OWNER"),
 * 					UserByEmail: bqowner.Email,
 * 				},
 * 				&bigquery.DatasetAccessTypeArgs{
 * 					Role:   pulumi.String("READER"),
 * 					Domain: pulumi.String("hashicorp.com"),
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
 * 			DatasetId:                pulumi.String("private"),
 * 			FriendlyName:             pulumi.String("test"),
 * 			Description:              pulumi.String("This dataset is private"),
 * 			Location:                 pulumi.String("EU"),
 * 			DefaultTableExpirationMs: pulumi.Int(3600000),
 * 			Labels: pulumi.StringMap{
 * 				"env": pulumi.String("default"),
 * 			},
 * 			Accesses: bigquery.DatasetAccessTypeArray{
 * 				&bigquery.DatasetAccessTypeArgs{
 * 					Role:        pulumi.String("OWNER"),
 * 					UserByEmail: bqowner.Email,
 * 				},
 * 				&bigquery.DatasetAccessTypeArgs{
 * 					Role:   pulumi.String("READER"),
 * 					Domain: pulumi.String("hashicorp.com"),
 * 				},
 * 				&bigquery.DatasetAccessTypeArgs{
 * 					Dataset: &bigquery.DatasetAccessDatasetArgs{
 * 						Dataset: &bigquery.DatasetAccessDatasetDatasetArgs{
 * 							ProjectId: public.Project,
 * 							DatasetId: public.DatasetId,
 * 						},
 * 						TargetTypes: pulumi.StringArray{
 * 							pulumi.String("VIEWS"),
 * 						},
 * 					},
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.serviceaccount.Account;
 * import com.pulumi.gcp.serviceaccount.AccountArgs;
 * import com.pulumi.gcp.bigquery.Dataset;
 * import com.pulumi.gcp.bigquery.DatasetArgs;
 * import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
 * import com.pulumi.gcp.bigquery.inputs.DatasetAccessDatasetArgs;
 * import com.pulumi.gcp.bigquery.inputs.DatasetAccessDatasetDatasetArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var bqowner = new Account("bqowner", AccountArgs.builder()
 *             .accountId("bqowner")
 *             .build());
 *         var public_ = new Dataset("public", DatasetArgs.builder()
 *             .datasetId("public")
 *             .friendlyName("test")
 *             .description("This dataset is public")
 *             .location("EU")
 *             .defaultTableExpirationMs(3600000)
 *             .labels(Map.of("env", "default"))
 *             .accesses(
 *                 DatasetAccessArgs.builder()
 *                     .role("OWNER")
 *                     .userByEmail(bqowner.email())
 *                     .build(),
 *                 DatasetAccessArgs.builder()
 *                     .role("READER")
 *                     .domain("hashicorp.com")
 *                     .build())
 *             .build());
 *         var dataset = new Dataset("dataset", DatasetArgs.builder()
 *             .datasetId("private")
 *             .friendlyName("test")
 *             .description("This dataset is private")
 *             .location("EU")
 *             .defaultTableExpirationMs(3600000)
 *             .labels(Map.of("env", "default"))
 *             .accesses(
 *                 DatasetAccessArgs.builder()
 *                     .role("OWNER")
 *                     .userByEmail(bqowner.email())
 *                     .build(),
 *                 DatasetAccessArgs.builder()
 *                     .role("READER")
 *                     .domain("hashicorp.com")
 *                     .build(),
 *                 DatasetAccessArgs.builder()
 *                     .dataset(DatasetAccessDatasetArgs.builder()
 *                         .dataset(DatasetAccessDatasetDatasetArgs.builder()
 *                             .projectId(public_.project())
 *                             .datasetId(public_.datasetId())
 *                             .build())
 *                         .targetTypes("VIEWS")
 *                         .build())
 *                     .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   public:
 *     type: gcp:bigquery:Dataset
 *     properties:
 *       datasetId: public
 *       friendlyName: test
 *       description: This dataset is public
 *       location: EU
 *       defaultTableExpirationMs: 3.6e+06
 *       labels:
 *         env: default
 *       accesses:
 *         - role: OWNER
 *           userByEmail: ${bqowner.email}
 *         - role: READER
 *           domain: hashicorp.com
 *   dataset:
 *     type: gcp:bigquery:Dataset
 *     properties:
 *       datasetId: private
 *       friendlyName: test
 *       description: This dataset is private
 *       location: EU
 *       defaultTableExpirationMs: 3.6e+06
 *       labels:
 *         env: default
 *       accesses:
 *         - role: OWNER
 *           userByEmail: ${bqowner.email}
 *         - role: READER
 *           domain: hashicorp.com
 *         - dataset:
 *             dataset:
 *               projectId: ${public.project}
 *               datasetId: ${public.datasetId}
 *             targetTypes:
 *               - VIEWS
 *   bqowner:
 *     type: gcp:serviceaccount:Account
 *     properties:
 *       accountId: bqowner
 * ```
 * 
 * ### Bigquery Dataset Authorized Routine
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const _public = new gcp.bigquery.Dataset("public", {
 *     datasetId: "public_dataset",
 *     description: "This dataset is public",
 * });
 * const publicRoutine = new gcp.bigquery.Routine("public", {
 *     datasetId: _public.datasetId,
 *     routineId: "public_routine",
 *     routineType: "TABLE_VALUED_FUNCTION",
 *     language: "SQL",
 *     definitionBody: "SELECT 1 + value AS value\n",
 *     arguments: [{
 *         name: "value",
 *         argumentKind: "FIXED_TYPE",
 *         dataType: JSON.stringify({
 *             typeKind: "INT64",
 *         }),
 *     }],
 *     returnTableType: JSON.stringify({
 *         columns: [{
 *             name: "value",
 *             type: {
 *                 typeKind: "INT64",
 *             },
 *         }],
 *     }),
 * });
 * const _private = new gcp.bigquery.Dataset("private", {
 *     datasetId: "private_dataset",
 *     description: "This dataset is private",
 *     accesses: [
 *         {
 *             role: "OWNER",
 *             userByEmail: "[email protected]",
 *         },
 *         {
 *             routine: {
 *                 projectId: publicRoutine.project,
 *                 datasetId: publicRoutine.datasetId,
 *                 routineId: publicRoutine.routineId,
 *             },
 *         },
 *     ],
 * });
 * ```
 * ```python
 * import pulumi
 * import json
 * import pulumi_gcp as gcp
 * public = gcp.bigquery.Dataset("public",
 *     dataset_id="public_dataset",
 *     description="This dataset is public")
 * public_routine = gcp.bigquery.Routine("public",
 *     dataset_id=public.dataset_id,
 *     routine_id="public_routine",
 *     routine_type="TABLE_VALUED_FUNCTION",
 *     language="SQL",
 *     definition_body="SELECT 1 + value AS value\n",
 *     arguments=[gcp.bigquery.RoutineArgumentArgs(
 *         name="value",
 *         argument_kind="FIXED_TYPE",
 *         data_type=json.dumps({
 *             "typeKind": "INT64",
 *         }),
 *     )],
 *     return_table_type=json.dumps({
 *         "columns": [{
 *             "name": "value",
 *             "type": {
 *                 "typeKind": "INT64",
 *             },
 *         }],
 *     }))
 * private = gcp.bigquery.Dataset("private",
 *     dataset_id="private_dataset",
 *     description="This dataset is private",
 *     accesses=[
 *         gcp.bigquery.DatasetAccessArgs(
 *             role="OWNER",
 *             user_by_email="[email protected]",
 *         ),
 *         gcp.bigquery.DatasetAccessArgs(
 *             routine=gcp.bigquery.DatasetAccessRoutineArgs(
 *                 project_id=public_routine.project,
 *                 dataset_id=public_routine.dataset_id,
 *                 routine_id=public_routine.routine_id,
 *             ),
 *         ),
 *     ])
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using System.Text.Json;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var @public = new Gcp.BigQuery.Dataset("public", new()
 *     {
 *         DatasetId = "public_dataset",
 *         Description = "This dataset is public",
 *     });
 *     var publicRoutine = new Gcp.BigQuery.Routine("public", new()
 *     {
 *         DatasetId = @public.DatasetId,
 *         RoutineId = "public_routine",
 *         RoutineType = "TABLE_VALUED_FUNCTION",
 *         Language = "SQL",
 *         DefinitionBody = @"SELECT 1 + value AS value
 * ",
 *         Arguments = new[]
 *         {
 *             new Gcp.BigQuery.Inputs.RoutineArgumentArgs
 *             {
 *                 Name = "value",
 *                 ArgumentKind = "FIXED_TYPE",
 *                 DataType = JsonSerializer.Serialize(new Dictionary
 *                 {
 *                     ["typeKind"] = "INT64",
 *                 }),
 *             },
 *         },
 *         ReturnTableType = JsonSerializer.Serialize(new Dictionary
 *         {
 *             ["columns"] = new[]
 *             {
 *                 new Dictionary
 *                 {
 *                     ["name"] = "value",
 *                     ["type"] = new Dictionary
 *                     {
 *                         ["typeKind"] = "INT64",
 *                     },
 *                 },
 *             },
 *         }),
 *     });
 *     var @private = new Gcp.BigQuery.Dataset("private", new()
 *     {
 *         DatasetId = "private_dataset",
 *         Description = "This dataset is private",
 *         Accesses = new[]
 *         {
 *             new Gcp.BigQuery.Inputs.DatasetAccessArgs
 *             {
 *                 Role = "OWNER",
 *                 UserByEmail = "[email protected]",
 *             },
 *             new Gcp.BigQuery.Inputs.DatasetAccessArgs
 *             {
 *                 Routine = new Gcp.BigQuery.Inputs.DatasetAccessRoutineArgs
 *                 {
 *                     ProjectId = publicRoutine.Project,
 *                     DatasetId = publicRoutine.DatasetId,
 *                     RoutineId = publicRoutine.RoutineId,
 *                 },
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"encoding/json"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		public, err := bigquery.NewDataset(ctx, "public", &bigquery.DatasetArgs{
 * 			DatasetId:   pulumi.String("public_dataset"),
 * 			Description: pulumi.String("This dataset is public"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		tmpJSON0, err := json.Marshal(map[string]interface{}{
 * 			"typeKind": "INT64",
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		json0 := string(tmpJSON0)
 * 		tmpJSON1, err := json.Marshal(map[string]interface{}{
 * 			"columns": []map[string]interface{}{
 * 				map[string]interface{}{
 * 					"name": "value",
 * 					"type": map[string]interface{}{
 * 						"typeKind": "INT64",
 * 					},
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		json1 := string(tmpJSON1)
 * 		publicRoutine, err := bigquery.NewRoutine(ctx, "public", &bigquery.RoutineArgs{
 * 			DatasetId:      public.DatasetId,
 * 			RoutineId:      pulumi.String("public_routine"),
 * 			RoutineType:    pulumi.String("TABLE_VALUED_FUNCTION"),
 * 			Language:       pulumi.String("SQL"),
 * 			DefinitionBody: pulumi.String("SELECT 1 + value AS value\n"),
 * 			Arguments: bigquery.RoutineArgumentArray{
 * 				&bigquery.RoutineArgumentArgs{
 * 					Name:         pulumi.String("value"),
 * 					ArgumentKind: pulumi.String("FIXED_TYPE"),
 * 					DataType:     pulumi.String(json0),
 * 				},
 * 			},
 * 			ReturnTableType: pulumi.String(json1),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = bigquery.NewDataset(ctx, "private", &bigquery.DatasetArgs{
 * 			DatasetId:   pulumi.String("private_dataset"),
 * 			Description: pulumi.String("This dataset is private"),
 * 			Accesses: bigquery.DatasetAccessTypeArray{
 * 				&bigquery.DatasetAccessTypeArgs{
 * 					Role:        pulumi.String("OWNER"),
 * 					UserByEmail: pulumi.String("[email protected]"),
 * 				},
 * 				&bigquery.DatasetAccessTypeArgs{
 * 					Routine: &bigquery.DatasetAccessRoutineArgs{
 * 						ProjectId: publicRoutine.Project,
 * 						DatasetId: publicRoutine.DatasetId,
 * 						RoutineId: publicRoutine.RoutineId,
 * 					},
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.bigquery.Dataset;
 * import com.pulumi.gcp.bigquery.DatasetArgs;
 * import com.pulumi.gcp.bigquery.Routine;
 * import com.pulumi.gcp.bigquery.RoutineArgs;
 * import com.pulumi.gcp.bigquery.inputs.RoutineArgumentArgs;
 * import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
 * import com.pulumi.gcp.bigquery.inputs.DatasetAccessRoutineArgs;
 * import static com.pulumi.codegen.internal.Serialization.*;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var public_ = new Dataset("public", DatasetArgs.builder()
 *             .datasetId("public_dataset")
 *             .description("This dataset is public")
 *             .build());
 *         var publicRoutine = new Routine("publicRoutine", RoutineArgs.builder()
 *             .datasetId(public_.datasetId())
 *             .routineId("public_routine")
 *             .routineType("TABLE_VALUED_FUNCTION")
 *             .language("SQL")
 *             .definitionBody("""
 * SELECT 1 + value AS value
 *             """)
 *             .arguments(RoutineArgumentArgs.builder()
 *                 .name("value")
 *                 .argumentKind("FIXED_TYPE")
 *                 .dataType(serializeJson(
 *                     jsonObject(
 *                         jsonProperty("typeKind", "INT64")
 *                     )))
 *                 .build())
 *             .returnTableType(serializeJson(
 *                 jsonObject(
 *                     jsonProperty("columns", jsonArray(jsonObject(
 *                         jsonProperty("name", "value"),
 *                         jsonProperty("type", jsonObject(
 *                             jsonProperty("typeKind", "INT64")
 *                         ))
 *                     )))
 *                 )))
 *             .build());
 *         var private_ = new Dataset("private", DatasetArgs.builder()
 *             .datasetId("private_dataset")
 *             .description("This dataset is private")
 *             .accesses(
 *                 DatasetAccessArgs.builder()
 *                     .role("OWNER")
 *                     .userByEmail("[email protected]")
 *                     .build(),
 *                 DatasetAccessArgs.builder()
 *                     .routine(DatasetAccessRoutineArgs.builder()
 *                         .projectId(publicRoutine.project())
 *                         .datasetId(publicRoutine.datasetId())
 *                         .routineId(publicRoutine.routineId())
 *                         .build())
 *                     .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   public:
 *     type: gcp:bigquery:Dataset
 *     properties:
 *       datasetId: public_dataset
 *       description: This dataset is public
 *   publicRoutine:
 *     type: gcp:bigquery:Routine
 *     name: public
 *     properties:
 *       datasetId: ${public.datasetId}
 *       routineId: public_routine
 *       routineType: TABLE_VALUED_FUNCTION
 *       language: SQL
 *       definitionBody: |
 *         SELECT 1 + value AS value
 *       arguments:
 *         - name: value
 *           argumentKind: FIXED_TYPE
 *           dataType:
 *             fn::toJSON:
 *               typeKind: INT64
 *       returnTableType:
 *         fn::toJSON:
 *           columns:
 *             - name: value
 *               type:
 *                 typeKind: INT64
 *   private:
 *     type: gcp:bigquery:Dataset
 *     properties:
 *       datasetId: private_dataset
 *       description: This dataset is private
 *       accesses:
 *         - role: OWNER
 *           userByEmail: [email protected]
 *         - routine:
 *             projectId: ${publicRoutine.project}
 *             datasetId: ${publicRoutine.datasetId}
 *             routineId: ${publicRoutine.routineId}
 * ```
 * 
 * ### Bigquery Dataset External Reference Aws
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const dataset = new gcp.bigquery.Dataset("dataset", {
 *     datasetId: "example_dataset",
 *     friendlyName: "test",
 *     description: "This is a test description",
 *     location: "aws-us-east-1",
 *     externalDatasetReference: {
 *         externalSource: "aws-glue://arn:aws:glue:us-east-1:999999999999:database/database",
 *         connection: "projects/project/locations/aws-us-east-1/connections/connection",
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * dataset = gcp.bigquery.Dataset("dataset",
 *     dataset_id="example_dataset",
 *     friendly_name="test",
 *     description="This is a test description",
 *     location="aws-us-east-1",
 *     external_dataset_reference=gcp.bigquery.DatasetExternalDatasetReferenceArgs(
 *         external_source="aws-glue://arn:aws:glue:us-east-1:999999999999:database/database",
 *         connection="projects/project/locations/aws-us-east-1/connections/connection",
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var dataset = new Gcp.BigQuery.Dataset("dataset", new()
 *     {
 *         DatasetId = "example_dataset",
 *         FriendlyName = "test",
 *         Description = "This is a test description",
 *         Location = "aws-us-east-1",
 *         ExternalDatasetReference = new Gcp.BigQuery.Inputs.DatasetExternalDatasetReferenceArgs
 *         {
 *             ExternalSource = "aws-glue://arn:aws:glue:us-east-1:999999999999:database/database",
 *             Connection = "projects/project/locations/aws-us-east-1/connections/connection",
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
 * 			DatasetId:    pulumi.String("example_dataset"),
 * 			FriendlyName: pulumi.String("test"),
 * 			Description:  pulumi.String("This is a test description"),
 * 			Location:     pulumi.String("aws-us-east-1"),
 * 			ExternalDatasetReference: &bigquery.DatasetExternalDatasetReferenceArgs{
 * 				ExternalSource: pulumi.String("aws-glue://arn:aws:glue:us-east-1:999999999999:database/database"),
 * 				Connection:     pulumi.String("projects/project/locations/aws-us-east-1/connections/connection"),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.bigquery.Dataset;
 * import com.pulumi.gcp.bigquery.DatasetArgs;
 * import com.pulumi.gcp.bigquery.inputs.DatasetExternalDatasetReferenceArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var dataset = new Dataset("dataset", DatasetArgs.builder()
 *             .datasetId("example_dataset")
 *             .friendlyName("test")
 *             .description("This is a test description")
 *             .location("aws-us-east-1")
 *             .externalDatasetReference(DatasetExternalDatasetReferenceArgs.builder()
 *                 .externalSource("aws-glue://arn:aws:glue:us-east-1:999999999999:database/database")
 *                 .connection("projects/project/locations/aws-us-east-1/connections/connection")
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   dataset:
 *     type: gcp:bigquery:Dataset
 *     properties:
 *       datasetId: example_dataset
 *       friendlyName: test
 *       description: This is a test description
 *       location: aws-us-east-1
 *       externalDatasetReference:
 *         externalSource: aws-glue://arn:aws:glue:us-east-1:999999999999:database/database
 *         connection: projects/project/locations/aws-us-east-1/connections/connection
 * ```
 * 
 * ## Import
 * Dataset can be imported using any of these accepted formats:
 * * `projects/{{project}}/datasets/{{dataset_id}}`
 * * `{{project}}/{{dataset_id}}`
 * * `{{dataset_id}}`
 * When using the `pulumi import` command, Dataset can be imported using one of the formats above. For example:
 * ```sh
 * $ pulumi import gcp:bigquery/dataset:Dataset default projects/{{project}}/datasets/{{dataset_id}}
 * ```
 * ```sh
 * $ pulumi import gcp:bigquery/dataset:Dataset default {{project}}/{{dataset_id}}
 * ```
 * ```sh
 * $ pulumi import gcp:bigquery/dataset:Dataset default {{dataset_id}}
 * ```
 * @property accesses An array of objects that define dataset access for one or more entities.
 * Structure is documented below.
 * @property datasetId A unique ID for this dataset, without the project name. The ID
 * must contain only letters (a-z, A-Z), numbers (0-9), or
 * underscores (_). The maximum length is 1,024 characters.
 * - - -
 * @property defaultCollation Defines the default collation specification of future tables created
 * in the dataset. If a table is created in this dataset without table-level
 * default collation, then the table inherits the dataset default collation,
 * which is applied to the string fields that do not have explicit collation
 * specified. A change to this field affects only tables created afterwards,
 * and does not alter the existing tables.
 * The following values are supported:
 * - 'und:ci': undetermined locale, case insensitive.
 * - '': empty string. Default to case-sensitive behavior.
 * @property defaultEncryptionConfiguration The default encryption key for all tables in the dataset. Once this property is set,
 * all newly-created partitioned tables in the dataset will have encryption key set to
 * this value, unless table creation request (or query) overrides the key.
 * Structure is documented below.
 * @property defaultPartitionExpirationMs The default partition expiration for all partitioned tables in
 * the dataset, in milliseconds.
 * Once this property is set, all newly-created partitioned tables in
 * the dataset will have an `expirationMs` property in the `timePartitioning`
 * settings set to this value, and changing the value will only
 * affect new tables, not existing ones. The storage in a partition will
 * have an expiration time of its partition time plus this value.
 * Setting this property overrides the use of `defaultTableExpirationMs`
 * for partitioned tables: only one of `defaultTableExpirationMs` and
 * `defaultPartitionExpirationMs` will be used for any new partitioned
 * table. If you provide an explicit `timePartitioning.expirationMs` when
 * creating or updating a partitioned table, that value takes precedence
 * over the default partition expiration time indicated by this property.
 * @property defaultTableExpirationMs The default lifetime of all tables in the dataset, in milliseconds.
 * The minimum value is 3600000 milliseconds (one hour).
 * Once this property is set, all newly-created tables in the dataset
 * will have an `expirationTime` property set to the creation time plus
 * the value in this property, and changing the value will only affect
 * new tables, not existing ones. When the `expirationTime` for a given
 * table is reached, that table will be deleted automatically.
 * If a table's `expirationTime` is modified or removed before the
 * table expires, or if you provide an explicit `expirationTime` when
 * creating a table, that value takes precedence over the default
 * expiration time indicated by this property.
 * @property deleteContentsOnDestroy If set to `true`, delete all the tables in the
 * dataset when destroying the resource; otherwise,
 * destroying the resource will fail if tables are present.
 * @property description A user-friendly description of the dataset
 * @property externalDatasetReference Information about the external metadata storage where the dataset is defined.
 * Structure is documented below.
 * @property friendlyName A descriptive name for the dataset
 * @property isCaseInsensitive TRUE if the dataset and its table names are case-insensitive, otherwise FALSE.
 * By default, this is FALSE, which means the dataset and its table names are
 * case-sensitive. This field does not affect routine references.
 * @property labels The labels associated with this dataset. You can use these to
 * organize and group your datasets.
 * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
 * Please refer to the field `effective_labels` for all of the labels present on the resource.
 * @property location The geographic location where the dataset should reside.
 * See [official docs](https://cloud.google.com/bigquery/docs/dataset-locations).
 * There are two types of locations, regional or multi-regional. A regional
 * location is a specific geographic place, such as Tokyo, and a multi-regional
 * location is a large geographic area, such as the United States, that
 * contains at least two geographic places.
 * The default value is multi-regional location `US`.
 * Changing this forces a new resource to be created.
 * @property maxTimeTravelHours Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
 * @property project The ID of the project in which the resource belongs.
 * If it is not provided, the provider project is used.
 * @property storageBillingModel Specifies the storage billing model for the dataset.
 * Set this flag value to LOGICAL to use logical bytes for storage billing,
 * or to PHYSICAL to use physical bytes instead.
 * LOGICAL is the default if this flag isn't specified.
 */
public data class DatasetArgs(
    public val accesses: Output<List<DatasetAccessArgs>>? = null,
    public val datasetId: Output<String>? = null,
    public val defaultCollation: Output<String>? = null,
    public val defaultEncryptionConfiguration: Output<DatasetDefaultEncryptionConfigurationArgs>? =
        null,
    public val defaultPartitionExpirationMs: Output<Int>? = null,
    public val defaultTableExpirationMs: Output<Int>? = null,
    public val deleteContentsOnDestroy: Output<Boolean>? = null,
    public val description: Output<String>? = null,
    public val externalDatasetReference: Output<DatasetExternalDatasetReferenceArgs>? = null,
    public val friendlyName: Output<String>? = null,
    public val isCaseInsensitive: Output<Boolean>? = null,
    public val labels: Output<Map<String, String>>? = null,
    public val location: Output<String>? = null,
    // NOTE(review): modeled as String here (time-travel window in hours); confirm against the
    // underlying Java SDK's DatasetArgs — the plain-value overload is outside this view.
    public val maxTimeTravelHours: Output<String>? = null,
    public val project: Output<String>? = null,
    public val storageBillingModel: Output<String>? = null,
) : ConvertibleToJava<com.pulumi.gcp.bigquery.DatasetArgs> {
    /** Converts this Kotlin-idiomatic args object into the underlying Java SDK [com.pulumi.gcp.bigquery.DatasetArgs]. */
    override fun toJava(): com.pulumi.gcp.bigquery.DatasetArgs =
        com.pulumi.gcp.bigquery.DatasetArgs.builder()
            .accesses(
                accesses?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.let({ args0 ->
                            args0.toJava()
                        })
                    })
                }),
            )
            .datasetId(datasetId?.applyValue({ args0 -> args0 }))
            .defaultCollation(defaultCollation?.applyValue({ args0 -> args0 }))
            .defaultEncryptionConfiguration(
                defaultEncryptionConfiguration?.applyValue({ args0 ->
                    args0.let({ args0 -> args0.toJava() })
                }),
            )
            .defaultPartitionExpirationMs(defaultPartitionExpirationMs?.applyValue({ args0 -> args0 }))
            .defaultTableExpirationMs(defaultTableExpirationMs?.applyValue({ args0 -> args0 }))
            .deleteContentsOnDestroy(deleteContentsOnDestroy?.applyValue({ args0 -> args0 }))
            .description(description?.applyValue({ args0 -> args0 }))
            .externalDatasetReference(
                externalDatasetReference?.applyValue({ args0 ->
                    args0.let({ args0 ->
                        args0.toJava()
                    })
                }),
            )
            .friendlyName(friendlyName?.applyValue({ args0 -> args0 }))
            .isCaseInsensitive(isCaseInsensitive?.applyValue({ args0 -> args0 }))
            .labels(labels?.applyValue({ args0 -> args0.map({ args0 -> args0.key.to(args0.value) }).toMap() }))
            .location(location?.applyValue({ args0 -> args0 }))
            .maxTimeTravelHours(maxTimeTravelHours?.applyValue({ args0 -> args0 }))
            .project(project?.applyValue({ args0 -> args0 }))
            .storageBillingModel(storageBillingModel?.applyValue({ args0 -> args0 })).build()
}

/**
 * Builder for [DatasetArgs].
 */
@PulumiTagMarker
public class DatasetArgsBuilder internal constructor() {
    private var accesses: Output<List<DatasetAccessArgs>>? = null

    private var datasetId: Output<String>? = null

    private var defaultCollation: Output<String>? = null

    private var defaultEncryptionConfiguration: Output<DatasetDefaultEncryptionConfigurationArgs>? =
        null

    private var defaultPartitionExpirationMs: Output<Int>? = null

    private var defaultTableExpirationMs: Output<Int>? = null

    private var deleteContentsOnDestroy: Output<Boolean>? = null

    private var description: Output<String>? = null

    private var externalDatasetReference: Output<DatasetExternalDatasetReferenceArgs>? = null

    private var friendlyName: Output<String>? = null

    private var isCaseInsensitive: Output<Boolean>? = null

    private var labels: Output<Map<String, String>>? = null

    private var location: Output<String>? = null

    // NOTE(review): String matches the hours-window property on DatasetArgs; confirm against the Java SDK.
    private var maxTimeTravelHours: Output<String>? = null

    private var project: Output<String>? = null

    private var storageBillingModel: Output<String>? = null

    /**
     * @param value An array of objects that define dataset access for one or more entities.
     * Structure is documented below.
     */
    @JvmName("gxmmjxvejjhlampy")
    public suspend fun accesses(`value`: Output<List<DatasetAccessArgs>>) {
        this.accesses = value
    }

    /**
     * @param values An array of objects that define dataset access for one or more entities.
     * Structure is documented below.
     */
    @JvmName("itfkhrgdaldeiweg")
    public suspend fun accesses(vararg values: Output<DatasetAccessArgs>) {
        this.accesses = Output.all(values.asList())
    }

    /**
     * @param values An array of objects that define dataset access for one or more entities.
     * Structure is documented below.
     */
    @JvmName("ifsquooiavdvwgws")
    public suspend fun accesses(values: List<Output<DatasetAccessArgs>>) {
        this.accesses = Output.all(values)
    }

    /**
     * @param value A unique ID for this dataset, without the project name. The ID
     * must contain only letters (a-z, A-Z), numbers (0-9), or
     * underscores (_). The maximum length is 1,024 characters.
     * - - -
     */
    @JvmName("fggvtvrnegdanktl")
    public suspend fun datasetId(`value`: Output<String>) {
        this.datasetId = value
    }

    /**
     * @param value Defines the default collation specification of future tables created
     * in the dataset. If a table is created in this dataset without table-level
     * default collation, then the table inherits the dataset default collation,
     * which is applied to the string fields that do not have explicit collation
     * specified. A change to this field affects only tables created afterwards,
     * and does not alter the existing tables.
     * The following values are supported:
     * - 'und:ci': undetermined locale, case insensitive.
     * - '': empty string. Default to case-sensitive behavior.
     */
    @JvmName("udfdjpkgrhwcgtyx")
    public suspend fun defaultCollation(`value`: Output<String>) {
        this.defaultCollation = value
    }

    /**
     * @param value The default encryption key for all tables in the dataset. Once this property is set,
     * all newly-created partitioned tables in the dataset will have encryption key set to
     * this value, unless table creation request (or query) overrides the key.
     * Structure is documented below.
     */
    @JvmName("spreptirekstinxu")
    public suspend fun defaultEncryptionConfiguration(`value`: Output<DatasetDefaultEncryptionConfigurationArgs>) {
        this.defaultEncryptionConfiguration = value
    }

    /**
     * @param value The default partition expiration for all partitioned tables in
     * the dataset, in milliseconds.
     * Once this property is set, all newly-created partitioned tables in
     * the dataset will have an `expirationMs` property in the `timePartitioning`
     * settings set to this value, and changing the value will only
     * affect new tables, not existing ones. The storage in a partition will
     * have an expiration time of its partition time plus this value.
     * Setting this property overrides the use of `defaultTableExpirationMs`
     * for partitioned tables: only one of `defaultTableExpirationMs` and
     * `defaultPartitionExpirationMs` will be used for any new partitioned
     * table. If you provide an explicit `timePartitioning.expirationMs` when
     * creating or updating a partitioned table, that value takes precedence
     * over the default partition expiration time indicated by this property.
     */
    @JvmName("lwycexrowtcwapqg")
    public suspend fun defaultPartitionExpirationMs(`value`: Output<Int>) {
        this.defaultPartitionExpirationMs = value
    }

    /**
     * @param value The default lifetime of all tables in the dataset, in milliseconds.
     * The minimum value is 3600000 milliseconds (one hour).
     * Once this property is set, all newly-created tables in the dataset
     * will have an `expirationTime` property set to the creation time plus
     * the value in this property, and changing the value will only affect
     * new tables, not existing ones. When the `expirationTime` for a given
     * table is reached, that table will be deleted automatically.
     * If a table's `expirationTime` is modified or removed before the
     * table expires, or if you provide an explicit `expirationTime` when
     * creating a table, that value takes precedence over the default
     * expiration time indicated by this property.
     */
    @JvmName("nhxgxlbmqmdcvdxf")
    public suspend fun defaultTableExpirationMs(`value`: Output<Int>) {
        this.defaultTableExpirationMs = value
    }

    /**
     * @param value If set to `true`, delete all the tables in the
     * dataset when destroying the resource; otherwise,
     * destroying the resource will fail if tables are present.
     */
    @JvmName("xliljqrdvxrrgorl")
    public suspend fun deleteContentsOnDestroy(`value`: Output<Boolean>) {
        this.deleteContentsOnDestroy = value
    }

    /**
     * @param value A user-friendly description of the dataset
     */
    @JvmName("xfjirjbfbjsaxfri")
    public suspend fun description(`value`: Output<String>) {
        this.description = value
    }

    /**
     * @param value Information about the external metadata storage where the dataset is defined.
     * Structure is documented below.
     */
    @JvmName("qtylrvjkiihlchwu")
    public suspend fun externalDatasetReference(`value`: Output<DatasetExternalDatasetReferenceArgs>) {
        this.externalDatasetReference = value
    }

    /**
     * @param value A descriptive name for the dataset
     */
    @JvmName("rrwmhifhxnncdlkr")
    public suspend fun friendlyName(`value`: Output<String>) {
        this.friendlyName = value
    }

    /**
     * @param value TRUE if the dataset and its table names are case-insensitive, otherwise FALSE.
     * By default, this is FALSE, which means the dataset and its table names are
     * case-sensitive. This field does not affect routine references.
     */
    @JvmName("qxplcssmivhachnp")
    public suspend fun isCaseInsensitive(`value`: Output<Boolean>) {
        this.isCaseInsensitive = value
    }

    /**
     * @param value The labels associated with this dataset. You can use these to
     * organize and group your datasets.
     * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
     * Please refer to the field `effective_labels` for all of the labels present on the resource.
     */
    @JvmName("pkyfwwnjsgoftgwx")
    public suspend fun labels(`value`: Output<Map<String, String>>) {
        this.labels = value
    }

    /**
     * @param value The geographic location where the dataset should reside.
     * See [official docs](https://cloud.google.com/bigquery/docs/dataset-locations).
     * There are two types of locations, regional or multi-regional. A regional
     * location is a specific geographic place, such as Tokyo, and a multi-regional
     * location is a large geographic area, such as the United States, that
     * contains at least two geographic places.
     * The default value is multi-regional location `US`.
     * Changing this forces a new resource to be created.
     */
    @JvmName("wreregvcbcbtqoqi")
    public suspend fun location(`value`: Output<String>) {
        this.location = value
    }

    /**
     * @param value Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
     */
    @JvmName("aubcivagbnyvkefu")
    public suspend fun maxTimeTravelHours(`value`: Output<String>) {
        this.maxTimeTravelHours = value
    }

    /**
     * @param value The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    @JvmName("kfqulqobgeitiqrl")
    public suspend fun project(`value`: Output<String>) {
        this.project = value
    }

    /**
     * @param value Specifies the storage billing model for the dataset.
     * Set this flag value to LOGICAL to use logical bytes for storage billing,
     * or to PHYSICAL to use physical bytes instead.
     * LOGICAL is the default if this flag isn't specified.
     */
    @JvmName("llinwvixklmcgksh")
    public suspend fun storageBillingModel(`value`: Output<String>) {
        this.storageBillingModel = value
    }

    /**
     * @param value An array of objects that define dataset access for one or more entities.
     * Structure is documented below.
     */
    @JvmName("sgmvysymbgmloqji")
    public suspend fun accesses(`value`: List<DatasetAccessArgs>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.accesses = mapped
    }

    /**
     * @param argument An array of objects that define dataset access for one or more entities.
     * Structure is documented below.
     */
    @JvmName("ggroainlxridqvtr")
    public suspend fun accesses(argument: List<suspend DatasetAccessArgsBuilder.() -> Unit>) {
        val toBeMapped = argument.toList().map {
            DatasetAccessArgsBuilder().applySuspend { it() }.build()
        }
        val mapped = of(toBeMapped)
        this.accesses = mapped
    }

    /**
     * @param argument An array of objects that define dataset access for one or more entities.
     * Structure is documented below.
     */
    @JvmName("mrunqpgfkrmqmbty")
    public suspend fun accesses(vararg argument: suspend DatasetAccessArgsBuilder.() -> Unit) {
        val toBeMapped = argument.toList().map {
            DatasetAccessArgsBuilder().applySuspend { it() }.build()
        }
        val mapped = of(toBeMapped)
        this.accesses = mapped
    }

    /**
     * @param argument An array of objects that define dataset access for one or more entities.
     * Structure is documented below.
     */
    @JvmName("iffqavfiswkfybcu")
    public suspend fun accesses(argument: suspend DatasetAccessArgsBuilder.() -> Unit) {
        val toBeMapped = listOf(DatasetAccessArgsBuilder().applySuspend { argument() }.build())
        val mapped = of(toBeMapped)
        this.accesses = mapped
    }

    /**
     * @param values An array of objects that define dataset access for one or more entities.
     * Structure is documented below.
     */
    @JvmName("urhgmqeaahrjwtmb")
    public suspend fun accesses(vararg values: DatasetAccessArgs) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.accesses = mapped
    }

    /**
     * @param value A unique ID for this dataset, without the project name. The ID
     * must contain only letters (a-z, A-Z), numbers (0-9), or
     * underscores (_). The maximum length is 1,024 characters.
     * - - -
     */
    @JvmName("krkywjbkotjevrsh")
    public suspend fun datasetId(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.datasetId = mapped
    }

    /**
     * @param value Defines the default collation specification of future tables created
     * in the dataset. If a table is created in this dataset without table-level
     * default collation, then the table inherits the dataset default collation,
     * which is applied to the string fields that do not have explicit collation
     * specified. A change to this field affects only tables created afterwards,
     * and does not alter the existing tables.
     * The following values are supported:
     * - 'und:ci': undetermined locale, case insensitive.
     * - '': empty string. Default to case-sensitive behavior.
     */
    @JvmName("nbvlabbcyhfohdvv")
    public suspend fun defaultCollation(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.defaultCollation = mapped
    }

    /**
     * @param value The default encryption key for all tables in the dataset. Once this property is set,
     * all newly-created partitioned tables in the dataset will have encryption key set to
     * this value, unless table creation request (or query) overrides the key.
     * Structure is documented below.
     */
    @JvmName("lyayivpcuisnfaxf")
    public suspend fun defaultEncryptionConfiguration(`value`: DatasetDefaultEncryptionConfigurationArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.defaultEncryptionConfiguration = mapped
    }

    /**
     * @param argument Type-safe builder for the default Cloud KMS encryption
     * configuration applied to every newly-created partitioned table in this
     * dataset, unless the table creation request (or query) supplies its own key.
     * Structure is documented below.
     */
    @JvmName("jatfifnlfbmhmsrh")
    public suspend fun defaultEncryptionConfiguration(argument: suspend DatasetDefaultEncryptionConfigurationArgsBuilder.() -> Unit) {
        val builder = DatasetDefaultEncryptionConfigurationArgsBuilder()
        builder.applySuspend { argument() }
        this.defaultEncryptionConfiguration = of(builder.build())
    }

    /**
     * @param value Default partition expiration, in milliseconds, for all
     * partitioned tables in the dataset.
     * Once set, every newly-created partitioned table receives an `expirationMs`
     * in its `timePartitioning` settings equal to this value; changing it later
     * affects only new tables. Each partition expires at its partition time plus
     * this value. This overrides `defaultTableExpirationMs` for partitioned
     * tables — only one of the two applies to a new partitioned table — and an
     * explicit `timePartitioning.expirationMs` on a table takes precedence over
     * this default.
     */
    @JvmName("fpetfjnfxxfynulg")
    public suspend fun defaultPartitionExpirationMs(`value`: Int?) {
        this.defaultPartitionExpirationMs = `value`?.let { plain -> of(plain) }
    }

    /**
     * @param value Default lifetime, in milliseconds, for all tables in the
     * dataset. Minimum is 3600000 ms (one hour).
     * Once set, every newly-created table receives an `expirationTime` of its
     * creation time plus this value; changing it later affects only new tables.
     * A table is deleted automatically when its `expirationTime` is reached.
     * A table's own (modified or explicit) `expirationTime` takes precedence
     * over this default.
     */
    @JvmName("cdwpouxrogbqdchw")
    public suspend fun defaultTableExpirationMs(`value`: Int?) {
        this.defaultTableExpirationMs = `value`?.let { plain -> of(plain) }
    }

    /**
     * @param value When `true`, destroying the resource also deletes every table
     * in the dataset; when `false` (or unset), destruction fails if any tables
     * are still present.
     */
    @JvmName("nsvxcpcgdhxenmoy")
    public suspend fun deleteContentsOnDestroy(`value`: Boolean?) {
        this.deleteContentsOnDestroy = `value`?.let { plain -> of(plain) }
    }

    /**
     * @param value Human-readable description of the dataset.
     */
    @JvmName("hkorrkyuxeqjkrns")
    public suspend fun description(`value`: String?) {
        this.description = `value`?.let { plain -> of(plain) }
    }

    /**
     * @param value Reference to the external metadata storage where the dataset
     * is defined.
     * Structure is documented below.
     */
    @JvmName("cuhttkyxxkjjwqwp")
    public suspend fun externalDatasetReference(`value`: DatasetExternalDatasetReferenceArgs?) {
        this.externalDatasetReference = `value`?.let { plain -> of(plain) }
    }

    /**
     * @param argument Type-safe builder for the reference to the external
     * metadata storage where the dataset is defined.
     * Structure is documented below.
     */
    @JvmName("dhcfikaafrdnytcd")
    public suspend fun externalDatasetReference(argument: suspend DatasetExternalDatasetReferenceArgsBuilder.() -> Unit) {
        val builder = DatasetExternalDatasetReferenceArgsBuilder()
        builder.applySuspend { argument() }
        this.externalDatasetReference = of(builder.build())
    }

    /**
     * @param value Descriptive (display) name for the dataset.
     */
    @JvmName("tnfatueqmakurpac")
    public suspend fun friendlyName(`value`: String?) {
        this.friendlyName = `value`?.let { plain -> of(plain) }
    }

    /**
     * @param value `true` makes the dataset and its table names
     * case-insensitive; the default `false` keeps them case-sensitive.
     * Routine references are unaffected by this flag.
     */
    @JvmName("ofvsgvnbjpdajkjw")
    public suspend fun isCaseInsensitive(`value`: Boolean?) {
        this.isCaseInsensitive = `value`?.let { plain -> of(plain) }
    }

    /**
     * @param value The labels associated with this dataset. You can use these to
     * organize and group your datasets.
     * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
     * Please refer to the field `effective_labels` for all of the labels present on the resource.
     */
    @JvmName("cevhsbdlcokahcvb")
    // FIX: the parameter type had lost its generic arguments (raw `Map?` is not
    // valid Kotlin); restored to `Map<String, String>?` to match the vararg
    // Pair<String, String> overload below and the underlying labels schema.
    public suspend fun labels(`value`: Map<String, String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.labels = mapped
    }

    /**
     * @param values The labels associated with this dataset. You can use these to
     * organize and group your datasets.
     * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
     * Please refer to the field `effective_labels` for all of the labels present on the resource.
     */
    @JvmName("jubwjmmbvsftbtqq")
    // FIX: the vararg element type had lost its generic arguments (raw `Pair`);
    // restored to `Pair<String, String>` so `toMap()` yields the
    // Map<String, String> the labels field requires.
    public fun labels(vararg values: Pair<String, String>) {
        val toBeMapped = values.toMap()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.labels = mapped
    }

    /**
     * @param value Geographic location where the dataset should reside.
     * See [official docs](https://cloud.google.com/bigquery/docs/dataset-locations).
     * Locations are either regional (one specific place, e.g. Tokyo) or
     * multi-regional (a large area containing at least two places, e.g. the
     * United States). Defaults to the multi-regional location `US`.
     * Changing this forces a new resource to be created.
     */
    @JvmName("pbexvgspudymoqdo")
    public suspend fun location(`value`: String?) {
        this.location = `value`?.let { plain -> of(plain) }
    }

    /**
     * @param value Time travel window in hours; valid values range from 48 to
     * 168 (2 to 7 days).
     */
    @JvmName("uygbhfgrcmnwarqc")
    public suspend fun maxTimeTravelHours(`value`: String?) {
        this.maxTimeTravelHours = `value`?.let { plain -> of(plain) }
    }

    /**
     * @param value ID of the project the resource belongs to; falls back to the
     * provider project when omitted.
     */
    @JvmName("ujyqgepypyxbbatp")
    public suspend fun project(`value`: String?) {
        this.project = `value`?.let { plain -> of(plain) }
    }

    /**
     * @param value Storage billing model for the dataset: `LOGICAL` to bill on
     * logical bytes, `PHYSICAL` to bill on physical bytes. Defaults to `LOGICAL`
     * when unspecified.
     */
    @JvmName("gxmxfjwdupagvlvv")
    public suspend fun storageBillingModel(`value`: String?) {
        this.storageBillingModel = `value`?.let { plain -> of(plain) }
    }

    /**
     * Assembles the values accumulated on this builder into a [DatasetArgs]
     * instance. Fields never set remain null and are passed through as-is.
     */
    internal fun build(): DatasetArgs = DatasetArgs(
        accesses = accesses,
        datasetId = datasetId,
        defaultCollation = defaultCollation,
        defaultEncryptionConfiguration = defaultEncryptionConfiguration,
        defaultPartitionExpirationMs = defaultPartitionExpirationMs,
        defaultTableExpirationMs = defaultTableExpirationMs,
        deleteContentsOnDestroy = deleteContentsOnDestroy,
        description = description,
        externalDatasetReference = externalDatasetReference,
        friendlyName = friendlyName,
        isCaseInsensitive = isCaseInsensitive,
        labels = labels,
        location = location,
        maxTimeTravelHours = maxTimeTravelHours,
        project = project,
        storageBillingModel = storageBillingModel,
    )
}




© 2015 - 2024 Weber Informatics LLC | Privacy Policy